Diffstat (limited to 'ansible_collections/netapp/ontap/tests/unit')
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/compat/__init__.py | 0
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/compat/builtins.py | 34
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/compat/mock.py | 122
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/compat/unittest.py | 44
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/framework/mock_rest_and_zapi_requests.py | 288
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/framework/rest_factory.py | 107
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/framework/test_mock_rest_and_zapi_requests.py | 189
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/framework/test_mock_rest_and_zapi_requests_no_netapp_lib.py | 94
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/framework/test_rest_factory.py | 44
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/framework/test_zapi_factory.py | 108
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/framework/ut_utilities.py | 31
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/framework/zapi_factory.py | 148
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/filter/test_na_filter_iso8601.py | 66
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/ansible_mocks.py | 181
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp.py | 182
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_invoke_elem.py | 154
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_ipaddress.py | 95
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_module.py | 885
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_rest.py | 586
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_send_request.py | 271
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_sf.py | 85
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_zapi.py | 374
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_response_helper.py | 156
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_application.py | 346
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_generic.py | 492
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_owning_resource.py | 98
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_volume.py | 233
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_vserver.py | 120
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/.gitignore | 2
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/CACHEDIR.TAG | 4
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/README.md | 8
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/lastfailed | 3
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/nodeids | 6
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/stepwise | 1
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_active_directory.py | 311
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_active_directory_domain_controllers.py | 177
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate.py | 627
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate_rest.py | 616
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_autosupport.py | 264
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_autosupport_invoke.py | 103
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_bgp_peer_group.py | 211
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_broadcast_domain.py | 808
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cg_snapshot.py | 81
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs.py | 464
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_acl.py | 412
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_group.py | 218
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_group_member.py | 338
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user.py | 204
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_modify.py | 223
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_set_password.py | 66
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_set_password_rest.py | 101
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_server.py | 770
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster.py | 688
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster_ha.py | 140
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster_peer.py | 305
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_command.py | 246
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_debug.py | 344
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_disk_options.py | 151
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_disks.py | 822
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_dns.py | 388
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_domain_tunnel.py | 145
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_efficiency_policy.py | 422
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ems_destination.py | 226
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ems_filter.py | 308
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy.py | 277
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy_rule.py | 404
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy_rule_rest.py | 387
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fcp_rest.py | 231
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdsd.py | 136
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdsp.py | 134
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdss.py | 102
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_directory_policy.py | 136
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_security_permissions.py | 647
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_security_permissions_acl.py | 331
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_firewall_policy.py | 263
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_firmware_upgrade.py | 891
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_flexcache.py | 838
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_event.py | 338
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_ext_engine.py | 395
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_policy.py | 339
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_scope.py | 351
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_status.py | 286
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_igroup.py | 415
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_igroup_initiator.py | 256
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_info.py | 738
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_interface.py | 1778
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ipspace.py | 189
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_iscsi.py | 339
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_iscsi_security.py | 195
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_job_schedule.py | 451
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_kerberos_interface.py | 107
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_kerberos_realm.py | 213
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ldap_client.py | 481
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_license.py | 432
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_license_nlf.py | 461
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_local_hosts.py | 178
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_log_forward.py | 343
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_login_messages.py | 332
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun.py | 308
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_app_rest.py | 584
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_copy.py | 113
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map.py | 159
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map_reporting_nodes.py | 170
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map_rest.py | 200
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_rest.py | 558
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_mcc_mediator.py | 124
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_metrocluster.py | 117
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_metrocluster_dr_group.py | 164
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_motd.py | 164
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_name_mappings.py | 282
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_name_service_switch.py | 181
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ndmp.py | 196
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_ifgrp.py | 737
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_port.py | 331
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_routes.py | 359
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_subnet.py | 275
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_vlan.py | 252
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nfs.py | 338
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nfs_rest.py | 324
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_node.py | 222
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntfs_dacl.py | 232
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntfs_sd.py | 189
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntp.py | 143
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntp_key.py | 141
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme.py | 185
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_namespace.py | 168
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_namespace_rest.py | 121
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_rest.py | 131
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_subsystem.py | 225
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_subsystem_rest.py | 256
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_object_store.py | 538
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_partitions.py | 515
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ports.py | 864
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_portset.py | 390
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_publickey.py | 471
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qos_adaptive_policy_group.py | 313
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qos_policy_group.py | 578
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qtree.py | 404
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_quota_policy.py | 174
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_quotas.py | 853
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_cli.py | 128
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_info.py | 1195
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_restit.py | 346
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_buckets.py | 739
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_groups.py | 319
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_policies.py | 220
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_services.py | 176
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_users.py | 194
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_certificates.py | 509
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_config.py | 254
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_ca_certificate.py | 140
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_config.py | 87
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_policy.py | 268
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_key_manager.py | 804
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ssh.py | 164
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_service_policy.py | 402
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_service_processor_network.py | 296
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snaplock_clock.py | 228
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapmirror.py | 1894
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapmirror_policy.py | 1269
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot.py | 363
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot_policy.py | 658
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot_policy_rest.py | 481
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snmp.py | 158
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snmp_traphosts.py | 153
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_software_update.py | 1124
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_storage_auto_giveback.py | 320
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_storage_failover.py | 350
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_svm.py | 1251
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_template.py | 86
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ucadapter.py | 173
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_unix_group.py | 545
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_unix_user.py | 465
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user.py | 744
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_dicts.py | 589
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_role.py | 139
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_role_rest.py | 647
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume.py | 2011
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_autosize.py | 367
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_clone.py | 210
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_clone_rest.py | 244
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_efficiency.py | 346
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_rest.py | 1440
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_snaplock.py | 131
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan.py | 200
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_access_policy.py | 348
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_demand_task.py | 135
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_demand_task_rest.py | 184
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_scanner_pool.py | 154
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_audit.py | 354
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_cifs_security.py | 111
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_peer.py | 440
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_peer_permissions.py | 226
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_wait_for_condition.py | 485
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_wwpn_alias.py | 192
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_zapit.py | 255
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_ontap_fdspt.py | 164
-rw-r--r--  ansible_collections/netapp/ontap/tests/unit/requirements.txt | 7
198 files changed, 68061 insertions, 0 deletions
diff --git a/ansible_collections/netapp/ontap/tests/unit/compat/__init__.py b/ansible_collections/netapp/ontap/tests/unit/compat/__init__.py
new file mode 100644
index 000000000..e69de29bb
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/compat/__init__.py
diff --git a/ansible_collections/netapp/ontap/tests/unit/compat/builtins.py b/ansible_collections/netapp/ontap/tests/unit/compat/builtins.py
new file mode 100644
index 000000000..feef5d758
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/compat/builtins.py
@@ -0,0 +1,34 @@
+# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+#
+# Compat for python2.7
+#
+
+# One unittest needs to import builtins via __import__() so we need to have
+# the string that represents it
+try:
+ # pylint: disable=unused-import
+ import __builtin__
+except ImportError:
+ BUILTINS = 'builtins'
+else:
+ BUILTINS = '__builtin__'
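The BUILTINS string above is intended for building a patch target that resolves to 'builtins' on Python 3 and '__builtin__' on Python 2, most commonly to mock the built-in open in tests. A minimal sketch of that pattern, using patch and mock_open from the compat mock shim that follows; the file path and its content are made up for illustration:

    from ansible_collections.netapp.ontap.tests.unit.compat.builtins import BUILTINS
    from ansible_collections.netapp.ontap.tests.unit.compat.mock import mock_open, patch

    def test_reads_config_file():
        # '%s.open' % BUILTINS resolves to 'builtins.open' (py3) or '__builtin__.open' (py2)
        with patch('%s.open' % BUILTINS, mock_open(read_data='key: value')) as mocked:
            with open('/tmp/fake_config.yml') as handle:   # hypothetical path, open is mocked
                assert handle.read() == 'key: value'
        mocked.assert_called_once_with('/tmp/fake_config.yml')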
diff --git a/ansible_collections/netapp/ontap/tests/unit/compat/mock.py b/ansible_collections/netapp/ontap/tests/unit/compat/mock.py
new file mode 100644
index 000000000..0972cd2e8
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/compat/mock.py
@@ -0,0 +1,122 @@
+# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+'''
+Compat module for Python3.x's unittest.mock module
+'''
+import sys
+
+# Python 2.7
+
+# Note: Could use the pypi mock library on python3.x as well as python2.x. It
+# is the same as the python3 stdlib mock library
+
+try:
+ # Allow wildcard import because we really do want to import all of mock's
+ # symbols into this compat shim
+ # pylint: disable=wildcard-import,unused-wildcard-import
+ from unittest.mock import *
+except ImportError:
+ # Python 2
+ # pylint: disable=wildcard-import,unused-wildcard-import
+ try:
+ from mock import *
+ except ImportError:
+ print('You need the mock library installed on python2.x to run tests')
+
+
+# Prior to 3.4.4, mock_open cannot handle binary read_data
+if sys.version_info >= (3,) and sys.version_info < (3, 4, 4):
+ file_spec = None
+
+ def _iterate_read_data(read_data):
+ # Helper for mock_open:
+ # Retrieve lines from read_data via a generator so that separate calls to
+ # readline, read, and readlines are properly interleaved
+ sep = b'\n' if isinstance(read_data, bytes) else '\n'
+ data_as_list = [l + sep for l in read_data.split(sep)]
+
+ if data_as_list[-1] == sep:
+ # If the last line ended in a newline, the list comprehension will have an
+ # extra entry that's just a newline. Remove this.
+ data_as_list = data_as_list[:-1]
+ else:
+ # If there wasn't an extra newline by itself, then the file being
+ # emulated doesn't have a newline to end the last line; remove the
+ # newline that our naive format() added
+ data_as_list[-1] = data_as_list[-1][:-1]
+
+ for line in data_as_list:
+ yield line
+
+ def mock_open(mock=None, read_data=''):
+ """
+ A helper function to create a mock to replace the use of `open`. It works
+ for `open` called directly or used as a context manager.
+
+ The `mock` argument is the mock object to configure. If `None` (the
+ default) then a `MagicMock` will be created for you, with the API limited
+ to methods or attributes available on standard file handles.
+
+ `read_data` is a string for the `read`, `readline`, and `readlines` methods of the
+ file handle to return. This is an empty string by default.
+ """
+ def _readlines_side_effect(*args, **kwargs):
+ if handle.readlines.return_value is not None:
+ return handle.readlines.return_value
+ return list(_data)
+
+ def _read_side_effect(*args, **kwargs):
+ if handle.read.return_value is not None:
+ return handle.read.return_value
+ return type(read_data)().join(_data)
+
+ def _readline_side_effect():
+ if handle.readline.return_value is not None:
+ while True:
+ yield handle.readline.return_value
+ for line in _data:
+ yield line
+
+ global file_spec
+ if file_spec is None:
+ import _io
+ file_spec = list(set(dir(_io.TextIOWrapper)).union(set(dir(_io.BytesIO))))
+
+ if mock is None:
+ mock = MagicMock(name='open', spec=open)
+
+ handle = MagicMock(spec=file_spec)
+ handle.__enter__.return_value = handle
+
+ _data = _iterate_read_data(read_data)
+
+ handle.write.return_value = None
+ handle.read.return_value = None
+ handle.readline.return_value = None
+ handle.readlines.return_value = None
+
+ handle.read.side_effect = _read_side_effect
+ handle.readline.side_effect = _readline_side_effect()
+ handle.readlines.side_effect = _readlines_side_effect
+
+ mock.return_value = handle
+ return mock
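The mock_open backport above exists so that read, readline, and readlines all draw from the same underlying read_data on the affected Python 3 releases (3.x before 3.4.4); on newer interpreters the stdlib version pulled in by the wildcard import is used instead. A short sketch of the interleaving behaviour the helpers aim for, assuming Python 3 for the 'builtins.open' patch target:

    from ansible_collections.netapp.ontap.tests.unit.compat.mock import mock_open, patch

    def test_interleaved_reads():
        opener = mock_open(read_data='line1\nline2\nline3\n')
        with patch('builtins.open', opener):
            with open('ignored.txt') as handle:            # the name is irrelevant, open is mocked
                assert handle.readline() == 'line1\n'      # readline consumes a single line
                assert handle.read() == 'line2\nline3\n'   # read returns whatever is left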
diff --git a/ansible_collections/netapp/ontap/tests/unit/compat/unittest.py b/ansible_collections/netapp/ontap/tests/unit/compat/unittest.py
new file mode 100644
index 000000000..73a20cf8c
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/compat/unittest.py
@@ -0,0 +1,44 @@
+# (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
+#
+# This file is part of Ansible
+#
+# Ansible is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# Ansible is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
+
+# Make coding more python3-ish
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+'''
+Compat module for Python2.7's unittest module
+'''
+
+import sys
+
+import pytest
+
+# Allow wildcard import because we really do want to import all of
+# unittests's symbols into this compat shim
+# pylint: disable=wildcard-import,unused-wildcard-import
+if sys.version_info < (2, 7):
+ try:
+ # Need unittest2 on python2.6
+ from unittest2 import *
+ except ImportError:
+ print('You need unittest2 installed on python2.6.x to run tests')
+
+ class TestCase:
+ """ skip everything """
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as unittest2 may not be available')
+else:
+ from unittest import *
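Downstream tests can import TestCase through this shim so that, on Python 2.6 without unittest2, the class degrades into a stub carrying a pytest skip marker instead of failing at import time. A minimal sketch of a consumer; the test body is illustrative only:

    from ansible_collections.netapp.ontap.tests.unit.compat import unittest

    class TestExample(unittest.TestCase):
        # on Python 2.6 without unittest2, the whole class is skipped via pytestmark
        def test_sanity(self):
            self.assertEqual(1 + 1, 2)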
diff --git a/ansible_collections/netapp/ontap/tests/unit/framework/mock_rest_and_zapi_requests.py b/ansible_collections/netapp/ontap/tests/unit/framework/mock_rest_and_zapi_requests.py
new file mode 100644
index 000000000..a920eeab6
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/framework/mock_rest_and_zapi_requests.py
@@ -0,0 +1,288 @@
+
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# Author: Laurent Nicolas, laurentn@netapp.com
+
+""" unit tests for Ansible modules for ONTAP:
+ fixture to mock REST send_request and ZAPI invoke_elem to trap all network calls
+
+ Note: errors are reported as exceptions. Additional details are printed to the output.
+ pytest suppresses the output unless -s is used.
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from copy import deepcopy
+from functools import partial
+import inspect
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+# set this to true to print messages about the fixture itself.
+DEBUG = False
+# if true, an error is raised if register_responses was not called.
+FORCE_REGISTRATION = False
+
+
+@pytest.fixture(autouse=True)
+def patch_request_and_invoke(request):
+ if DEBUG:
+ print('entering patch_request_and_invoke fixture for', request.function)
+ function_name = request.function.__name__
+
+ with patch('time.sleep') as mock_time_sleep:
+ mock_time_sleep.side_effect = partial(_mock_time_sleep, function_name)
+ with patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request') as mock_send_request:
+ mock_send_request.side_effect = partial(_mock_netapp_send_request, function_name)
+ if netapp_utils.has_netapp_lib():
+ with patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapZAPICx.invoke_elem') as mock_invoke_elem:
+ mock_invoke_elem.side_effect = partial(_mock_netapp_invoke_elem, function_name)
+ yield mock_send_request, mock_invoke_elem
+ else:
+ yield mock_send_request
+
+ # This part is executed after the test completes
+ _patch_request_and_invoke_exit_checks(function_name)
+
+
+def register_responses(responses, function_name=None):
+ ''' When patching, the pytest request identifies the UT test function
+ if the registration is happening in a helper function, function_name needs to identify the calling test function
+ EG:
+ test_me():
+ for x in range:
+ check_something()
+ if the registration happens in check_something, function_name needs to be set to test_me (as a string)
+ '''
+ caller = inspect.currentframe().f_back.f_code.co_name
+ if DEBUG:
+ print('register_responses - caller:', caller, 'function_name:', function_name)
+ if function_name is not None and function_name != caller and (caller.startswith('test') or not function_name.startswith('test')):
+ raise KeyError('inspect reported a different name: %s, received: %s' % (caller, function_name))
+ if function_name is None:
+ function_name = caller
+ fixed_records = []
+ for record in responses:
+ try:
+ expected_method, expected_api, response = record
+ if expected_method not in ['ZAPI', 'GET', 'OPTIONS', 'POST', 'PATCH', 'DELETE']:
+ raise KeyError('Unexpected method %s in %s for function: %s' % (expected_method, record, function_name))
+ except ValueError:
+ expected_method = 'ZAPI'
+ expected_api, response = record
+ if expected_method == 'ZAPI':
+ # sanity checks for netapp-lib are deferred until the test is actually run
+ response, valid = response
+ if valid != 'valid':
+ raise ImportError(response)
+ # some modules modify the record in place - keep the original intact
+ fixed_records.append((expected_method, expected_api, deepcopy(response)))
+ _RESPONSES[function_name] = fixed_records
+
+
+def get_mock_record(function_name=None):
+ if function_name is None:
+ function_name = inspect.currentframe().f_back.f_code.co_name
+ return _REQUESTS.get(function_name)
+
+
+def print_requests(function_name=None):
+ if function_name is None:
+ function_name = inspect.currentframe().f_back.f_code.co_name
+ if function_name not in _REQUESTS:
+ print('No request processed for %s' % function_name)
+ return
+ print('--- %s - processed requests ---' % function_name)
+ for record in _REQUESTS[function_name].get_requests():
+ print(record)
+ print('--- %s - end of processed requests ---' % function_name)
+
+
+def print_requests_and_responses(function_name=None):
+ if function_name is None:
+ function_name = inspect.currentframe().f_back.f_code.co_name
+ if function_name not in _REQUESTS:
+ print('No request processed for %s' % function_name)
+ return
+ print('--- %s - processed requests and responses---' % function_name)
+ for record in _REQUESTS[function_name].get_responses():
+ print(record)
+ print('--- %s - end of processed requests and responses---' % function_name)
+
+
+class MockCalls:
+ '''record calls'''
+ def __init__(self, function_name):
+ self.function_name = function_name
+ self.requests = []
+ self.responses = []
+
+ def get_responses(self, method=None, api=None):
+ for record in self.responses:
+ if ((method is None or record.get('method') == method)
+ and (api is None or record.get('api') == api)):
+ yield record
+
+ def get_requests(self, method=None, api=None, response=None):
+ for record in self.requests:
+ if ((method is None or record.get('method') == method)
+ and (api is None or record.get('api') == api)
+ and (response is None or record.get('response') == response)):
+ yield record
+
+ def is_record_in_json(self, record, method, api, response=None):
+ for request in self.get_requests(method, api, response):
+ json = request.get('json')
+ if json and self._record_in_dict(record, json):
+ return True
+ return False
+
+ def is_zapi_called(self, zapi):
+ return any(self.get_requests('ZAPI', zapi))
+
+ def get_request(self, sequence):
+ return self.requests[sequence]
+
+ def is_text_in_zapi_request(self, text, sequence, present=True):
+ found = text in str(self.get_request(sequence)['zapi_request'])
+ if found != present:
+ not_expected = 'not ' if present else ''
+ print('Error: %s %sfound in %s' % (text, not_expected, self.get_request(sequence)['zapi_request']))
+ return found
+
+ # private methods
+
+ def __del__(self):
+ if DEBUG:
+ print('Deleting MockCalls instance for', self.function_name)
+
+ def _record_response(self, method, api, response):
+ print(method, api, response)
+ if method == 'ZAPI':
+ try:
+ response = response.to_string()
+ except AttributeError:
+ pass
+ self.responses.append((method, api, response))
+
+ @staticmethod
+ def _record_in_dict(record, adict):
+ for key, value in record.items():
+ if key not in adict:
+ print('key: %s not found in %s' % (key, adict))
+ return False
+ if value != adict[key]:
+ print('Values differ for key: %s: - %s vs %s' % (key, value, adict[key]))
+ return False
+ return True
+
+ def _record_rest_request(self, method, api, params, json, headers, files):
+ record = {
+ 'params': params,
+ 'json': json,
+ 'headers': headers,
+ 'files': files,
+ }
+ self._record_request(method, api, record)
+
+ def _record_zapi_request(self, zapi, na_element, enable_tunneling):
+ try:
+ zapi_request = na_element.to_string()
+ except AttributeError:
+ zapi_request = na_element
+ record = {
+ 'na_element': na_element,
+ 'zapi_request': zapi_request,
+ 'tunneling': enable_tunneling
+ }
+ self._record_request('ZAPI', zapi, record)
+
+ def _record_request(self, method, api, record=None):
+ record = record or {}
+ record['function'] = self.function_name
+ record['method'] = method
+ record['api'] = api
+ self.requests.append(record)
+
+ def _get_response(self, method, api):
+ response = _get_response(self.function_name, method, api)
+ self._record_response(method, api, response)
+ return response
+
+
+# private variables and methods
+
+_REQUESTS = {}
+_RESPONSES = {}
+
+
+def _get_response(function, method, api):
+ if function not in _RESPONSES:
+ print('Error: make sure to add entries for %s in RESPONSES.' % function)
+ raise KeyError('function %s is not registered - %s %s' % (function, method, api))
+ if not _RESPONSES[function]:
+ print('Error: exhausted all entries for %s in RESPONSES, received request for %s %s' % (function, method, api))
+ print_requests(function)
+ raise KeyError('function %s received unhandled call %s %s' % (function, method, api))
+ expected_method, expected_api, response = _RESPONSES[function][0]
+ if expected_method != method or expected_api not in ['*', api]:
+ print_requests(function)
+ raise KeyError('function %s received an unexpected call %s %s, expecting %s %s' % (function, method, api, expected_method, expected_api))
+ _RESPONSES[function].pop(0)
+ if isinstance(response, Exception):
+ raise response
+ # some modules modify the record in place - keep the original intact
+ return deepcopy(response)
+
+
+def _get_or_create_mock_record(function_name):
+ if function_name not in _REQUESTS:
+ _REQUESTS[function_name] = MockCalls(function_name)
+ return _REQUESTS[function_name]
+
+
+def _mock_netapp_send_request(function_name, method, api, params, json=None, headers=None, files=None):
+ if DEBUG:
+ print('Inside _mock_netapp_send_request')
+ mock_calls = _get_or_create_mock_record(function_name)
+ mock_calls._record_rest_request(method, api, params, json, headers, files)
+ return mock_calls._get_response(method, api)
+
+
+def _mock_netapp_invoke_elem(function_name, na_element, enable_tunneling=False):
+ if DEBUG:
+ print('Inside _mock_netapp_invoke_elem')
+ zapi = na_element.get_name()
+ mock_calls = _get_or_create_mock_record(function_name)
+ mock_calls._record_zapi_request(zapi, na_element, enable_tunneling)
+ return mock_calls._get_response('ZAPI', zapi)
+
+
+def _mock_time_sleep(function_name, duration):
+ if DEBUG:
+ print('Inside _mock_time_sleep for %s' % function_name)
+ if duration > 0.1:
+ # the IDE or debug mode may add a small timer - only report "large" values
+ raise KeyError("time.sleep(%s) was called - add: @patch('time.sleep')" % duration)
+
+
+def _patch_request_and_invoke_exit_checks(function_name):
+ # actions to be performed after a test is complete
+ if DEBUG:
+ print('exiting patch_request_and_invoke fixture for', function_name)
+ if FORCE_REGISTRATION:
+ assert function_name in _RESPONSES, 'Error: responses for ZAPI invoke or REST send requests are not registered.'
+ # make sure all expected requests were consumed
+ if _RESPONSES.get(function_name):
+ print('Error: not all responses were processed. It is expected if the test failed.')
+ print('Error: remaining responses: %s' % _RESPONSES[function_name])
+ msg = 'Error: not all responses were processed. Use -s to see detailed error. '\
+ 'Ignore this error if there is an earlier error in the test.'
+ assert not _RESPONSES.get(function_name), msg
+ if function_name in _RESPONSES:
+ del _RESPONSES[function_name]
+ if function_name in _REQUESTS:
+ del _REQUESTS[function_name]
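Putting the fixture together: a module unit test imports patch_request_and_invoke (the autouse fixture then applies to every test in the importing module), registers the expected calls in order, runs the module, and can inspect the recorded traffic through MockCalls. The sketch below follows the docstrings above; run_volume_module and the 'vol1' payload are hypothetical stand-ins for the helpers the collection's real tests use (see plugins/module_utils/ansible_mocks.py in the diffstat):

    # pylint: disable=unused-import
    from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import \
        patch_request_and_invoke, register_responses, get_mock_record
    from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses

    SRR = rest_responses()

    def test_volume_is_created():
        # responses are consumed in order; an unexpected or leftover call fails the test
        register_responses([
            ('GET', 'cluster', SRR['is_rest_9_10_1']),
            ('GET', 'storage/volumes', SRR['zero_records']),
            ('POST', 'storage/volumes', SRR['success']),
        ])
        run_volume_module()                                # hypothetical: invokes the module under test
        record = get_mock_record()
        assert record.is_record_in_json({'name': 'vol1'}, 'POST', 'storage/volumes')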
diff --git a/ansible_collections/netapp/ontap/tests/unit/framework/rest_factory.py b/ansible_collections/netapp/ontap/tests/unit/framework/rest_factory.py
new file mode 100644
index 000000000..dfda97c03
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/framework/rest_factory.py
@@ -0,0 +1,107 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# Author: Laurent Nicolas, laurentn@netapp.com
+
+""" unit tests for Ansible modules for ONTAP:
+ utility to build REST responses and errors, and register them to use them in testcases.
+
+ 1) at the module level, define the REST responses:
+ SRR = rest_responses() if you're only interested in the default ones: 'empty', 'error', ...
+ SRR = rest_responses(dict) to use the default ones and augment them:
+ a key identifies a response name, and the value is a tuple.
+
+ 2) in each test function, register a list of (method, api, response) entries using register_responses
+ def test_create_aggr():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'storage/aggregates', SRR['empty_good'])
+ ])
+
+ See ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate_rest.py
+ for an example.
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+JOB_GET_API = ' cluster/jobs/94b6e6a7-d426-11eb-ac81-00505690980f'
+
+
+def _build_job(state):
+ return (200, {
+ "uuid": "f03ccbb6-d8bb-11eb-ac81-00505690980f",
+ "description": "job results with state: %s" % state,
+ "state": state,
+ "message": "job reported %s" % state
+ }, None)
+
+
+# name: (http_code, dict or None, None or error string)
+# the dict is returned as the JSON body; num_records is None or an integer >= 0
+_DEFAULT_RESPONSES = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_rest_95': (200, dict(version=dict(generation=9, major=5, minor=0, full='dummy_9_5_0')), None),
+ 'is_rest_96': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy_9_6_0')), None),
+ 'is_rest_97': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy_9_7_0')), None),
+ 'is_rest_9_7_5': (200, dict(version=dict(generation=9, major=7, minor=5, full='dummy_9_7_5')), None),
+ 'is_rest_9_8_0': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy_9_8_0')), None),
+ 'is_rest_9_9_0': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy_9_9_0')), None),
+ 'is_rest_9_9_1': (200, dict(version=dict(generation=9, major=9, minor=1, full='dummy_9_9_1')), None),
+ 'is_rest_9_10_1': (200, dict(version=dict(generation=9, major=10, minor=1, full='dummy_9_10_1')), None),
+ 'is_rest_9_11_0': (200, dict(version=dict(generation=9, major=11, minor=0, full='dummy_9_11_0')), None),
+ 'is_rest_9_11_1': (200, dict(version=dict(generation=9, major=11, minor=1, full='dummy_9_11_1')), None),
+ 'is_rest_9_12_0': (200, dict(version=dict(generation=9, major=12, minor=0, full='dummy_9_12_0')), None),
+ 'is_rest_9_12_1': (200, dict(version=dict(generation=9, major=12, minor=1, full='dummy_9_12_1')), None),
+ 'is_rest_9_13_1': (200, dict(version=dict(generation=9, major=13, minor=1, full='dummy_9_13_1')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'success': (200, {}, None),
+ 'success_with_job_uuid': (200, {'job': {'_links': {'self': {'href': '/api/%s' % JOB_GET_API}}}}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'empty_records': (200, {'records': []}, None),
+ 'zero_records': (200, {'num_records': 0}, None),
+ 'one_record': (200, {'num_records': 1}, None),
+ 'one_vserver_record': (200, {'num_records': 1, 'records': [{'svm': {'name': 'svm_name', 'uuid': 'svm_uuid'}}]}, None),
+ 'generic_error': (400, None, "Expected error"),
+ 'error_record': (400, None, {'code': 6, 'message': 'Expected error'}),
+ 'job_generic_response_success': _build_job('success'),
+ 'job_generic_response_running': _build_job('running'),
+ 'job_generic_response_failure': _build_job('failure'),
+}
+
+
+def rest_error_message(error, api=None, extra='', got=None):
+ if got is None:
+ got = 'got Expected error.'
+ msg = ('%s: ' % error) if error else ''
+ msg += ('calling: %s: ' % api) if api else ''
+ msg += got
+ msg += extra
+ return msg
+
+
+class rest_responses:
+ ''' return an object that behaves like a read-only dictionary
+ supports [key] to read an entry, and 'in' keyword to check key existence.
+ '''
+ def __init__(self, adict=None, allow_override=True):
+ self.responses = dict(_DEFAULT_RESPONSES.items())
+ if adict:
+ for key, value in adict.items():
+ if not allow_override and key in self.responses:
+ raise KeyError('duplicated key: %s' % key)
+ self.responses[key] = value
+
+ def _get_response(self, name):
+ try:
+ return self.responses[name]
+ except KeyError:
+ raise KeyError('%s not registered, list of valid keys: %s' % (name, self.responses.keys()))
+
+ def __getitem__(self, name):
+ return self._get_response(name)
+
+ def __contains__(self, name):
+ return name in self.responses
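In practice a test module layers its own canned records on top of the defaults and uses rest_error_message to build the error strings the module under test is expected to raise. A small sketch; the volume record is made up for illustration:

    from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses, rest_error_message

    SRR = rest_responses({
        'volume_record': (200, {'num_records': 1,
                                'records': [{'uuid': 'u1', 'name': 'vol1', 'svm': {'name': 'svm1'}}]}, None),
    })

    assert 'is_rest_9_10_1' in SRR            # the defaults remain available
    assert SRR['volume_record'][0] == 200     # custom entries follow the same (status, body, error) shape

    # 'generic_error' carries 'Expected error', which rest_error_message assumes by default
    assert rest_error_message('Error fetching volume', 'storage/volumes') == \
        'Error fetching volume: calling: storage/volumes: got Expected error.'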
diff --git a/ansible_collections/netapp/ontap/tests/unit/framework/test_mock_rest_and_zapi_requests.py b/ansible_collections/netapp/ontap/tests/unit/framework/test_mock_rest_and_zapi_requests.py
new file mode 100644
index 000000000..bd94b027a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/framework/test_mock_rest_and_zapi_requests.py
@@ -0,0 +1,189 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module unit test fixture amd helper mock_rest_and_zapi_requests """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+import ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests as uut
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke as patch_fixture
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import zapi_responses, build_zapi_response
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib') # pragma: no cover
+
+# REST API canned responses when mocking send_request.
+# The rest_factory provides default responses shared across testcases.
+SRR = rest_responses()
+# ZAPI canned responses when mocking invoke_elem.
+# The zapi_factory provides default responses shared across testcases.
+ZRR = zapi_responses()
+
+uut.DEBUG = True
+
+
+def test_register_responses():
+ uut.register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('get-version', build_zapi_response({})),
+ ('get-bad-zapi', ('BAD_ZAPI', 'valid'))
+ ], 'test_register_responses')
+ assert uut._get_response('test_register_responses', 'GET', 'cluster') == SRR['is_rest']
+ assert uut._get_response('test_register_responses', 'ZAPI', 'get-version').to_string() == build_zapi_response({})[0].to_string()
+ # If to_string() is not available, the ZAPI is registered as is.
+ assert uut._get_response('test_register_responses', 'ZAPI', 'get-bad-zapi') == 'BAD_ZAPI'
+
+
+def test_negative_register_responses():
+ with pytest.raises(KeyError) as exc:
+ uut.register_responses([
+ ('MOVE', 'cluster', SRR['is_rest']),
+ ], 'not_me')
+ assert exc.value.args[0] == 'inspect reported a different name: test_negative_register_responses, received: not_me'
+
+ with pytest.raises(KeyError) as exc:
+ uut.register_responses([
+ ('MOVE', 'cluster', SRR['is_rest']),
+ ])
+ assert 'Unexpected method MOVE' in exc.value.args[0]
+
+
+def test_negative_get_response():
+ with pytest.raises(KeyError) as exc:
+ uut._get_response('test_negative_get_response', 'POST', 'cluster')
+ assert exc.value.args[0] == 'function test_negative_get_response is not registered - POST cluster'
+
+ uut.register_responses([
+ ('GET', 'cluster', SRR['is_rest'])])
+ with pytest.raises(KeyError) as exc:
+ uut._get_response('test_negative_get_response', 'POST', 'cluster')
+ assert exc.value.args[0] == 'function test_negative_get_response received an unexpected call POST cluster, expecting GET cluster'
+
+ uut._get_response('test_negative_get_response', 'GET', 'cluster')
+ with pytest.raises(KeyError) as exc:
+ uut._get_response('test_negative_get_response', 'POST', 'cluster')
+ assert exc.value.args[0] == 'function test_negative_get_response received unhandled call POST cluster'
+
+
+def test_record_rest_request():
+ function_name = 'testme'
+ method = 'METHOD'
+ api = 'API'
+ params = 'PARAMS'
+ json = {'record': {'key': 'value'}}
+ headers = {}
+ files = {'data': 'value'}
+ calls = uut.MockCalls(function_name)
+ calls._record_rest_request(method, api, params, json, headers, files)
+ uut.print_requests(function_name)
+ assert len([calls.get_requests(method, api)]) == 1
+ assert calls.is_record_in_json({'record': {'key': 'value'}}, 'METHOD', 'API')
+ assert not calls.is_record_in_json({'record': {'key': 'value1'}}, 'METHOD', 'API')
+ assert not calls.is_record_in_json({'key': 'value'}, 'METHOD', 'API')
+
+
+def test_record_zapi_request():
+ function_name = 'testme'
+ api = 'API'
+ zapi = build_zapi_response({})
+ tunneling = False
+ calls = uut.MockCalls(function_name)
+ calls._record_zapi_request(api, zapi, tunneling)
+ uut.print_requests(function_name)
+ assert len([calls.get_requests('ZAPI', api)]) == 1
+ assert calls.is_zapi_called('API')
+ assert not calls.is_zapi_called('version')
+
+
+def test_negative_record_zapi_request():
+ function_name = 'testme'
+ api = 'API'
+ zapi = 'STRING' # AttributeError is handled in the function
+ tunneling = False
+ calls = uut.MockCalls(function_name)
+ calls._record_zapi_request(api, zapi, tunneling)
+ uut.print_requests(function_name)
+ assert len([calls.get_requests('ZAPI', api)]) == 1
+
+
+def test_negative_record_zapi_response():
+ function_name = 'testme'
+ api = 'API'
+ zapi = 'STRING' # AttributeError is handled in the function
+ calls = uut.MockCalls(function_name)
+ calls._record_response('ZAPI', api, zapi)
+ uut.print_requests_and_responses(function_name)
+ assert len([calls.get_responses('ZAPI', api)]) == 1
+
+
+def test_mock_netapp_send_request():
+ function_name = 'test_mock_netapp_send_request'
+ method = 'GET'
+ api = 'cluster'
+ params = 'PARAMS'
+ uut.register_responses([
+ ('GET', 'cluster', SRR['is_rest'])])
+ response = uut._mock_netapp_send_request(function_name, method, api, params)
+ assert response == SRR['is_rest']
+
+
+def test_mock_netapp_invoke_elem():
+ function_name = 'test_mock_netapp_invoke_elem'
+ method = 'ZAPI'
+ api = 'cluster'
+ params = 'PARAMS'
+ zapi = netapp_utils.zapi.NaElement.create_node_with_children('get-version')
+ uut.register_responses([
+ ('get-version', build_zapi_response({}))])
+ response = uut._mock_netapp_invoke_elem(function_name, zapi)
+ assert response.to_string() == build_zapi_response({})[0].to_string()
+
+
+def test_print_requests_and_responses():
+ uut.print_requests_and_responses()
+
+
+def test_fixture(patch_fixture):
+ uut.register_responses([
+ ('get-version', build_zapi_response({}))])
+ mock_sr, mock_invoke = patch_fixture
+ cx = netapp_utils.OntapZAPICx()
+ cx.invoke_elem(netapp_utils.zapi.NaElement.create_node_with_children('get-version'))
+ assert 'test_fixture' in uut._RESPONSES
+ assert 'test_fixture' in uut._REQUESTS
+ uut.print_requests()
+ uut.print_requests_and_responses()
+ assert len(mock_sr.mock_calls) == 0
+ assert len(mock_invoke.mock_calls) == 1
+ calls = uut.get_mock_record()
+ assert len([calls.get_requests()]) == 1
+
+
+def test_fixture_exit_unregistered(patch_fixture):
+ uut.FORCE_REGISTRATION = True
+ with pytest.raises(AssertionError) as exc:
+ uut._patch_request_and_invoke_exit_checks('test_fixture_exit_unregistered')
+ msg = 'Error: responses for ZAPI invoke or REST send requests are not registered.'
+ assert msg in exc.value.args[0]
+ uut.FORCE_REGISTRATION = False
+
+
+def test_fixture_exit_unused_response(patch_fixture):
+ uut.FORCE_REGISTRATION = True
+ uut.register_responses([
+ ('get-version', build_zapi_response({}))])
+ # report an error if any response is not consumed
+ with pytest.raises(AssertionError) as exc:
+ uut._patch_request_and_invoke_exit_checks('test_fixture_exit_unused_response')
+ msg = 'Error: not all responses were processed. Use -s to see detailed error. Ignore this error if there is an earlier error in the test.'
+ assert msg in exc.value.args[0]
+ # consume the response
+ cx = netapp_utils.OntapZAPICx()
+ cx.invoke_elem(netapp_utils.zapi.NaElement.create_node_with_children('get-version'))
+ uut.FORCE_REGISTRATION = False
diff --git a/ansible_collections/netapp/ontap/tests/unit/framework/test_mock_rest_and_zapi_requests_no_netapp_lib.py b/ansible_collections/netapp/ontap/tests/unit/framework/test_mock_rest_and_zapi_requests_no_netapp_lib.py
new file mode 100644
index 000000000..4d929c975
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/framework/test_mock_rest_and_zapi_requests_no_netapp_lib.py
@@ -0,0 +1,94 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module unit test fixture amd helper mock_rest_and_zapi_requests """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+import ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests as uut
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke as patch_fixture
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import zapi_responses, build_zapi_response, build_zapi_error
+
+# REST API canned responses when mocking send_request.
+# The rest_factory provides default responses shared across testcases.
+SRR = rest_responses()
+
+uut.DEBUG = True
+
+
+@pytest.fixture(autouse=True, scope='module')
+def patch_has_netapp_lib():
+ with patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib') as has_netapp_lib:
+ has_netapp_lib.return_value = False
+ yield has_netapp_lib
+
+
+def test_import_error_zapi_responses():
+ # ZAPI canned responses when mocking invoke_elem.
+ # The zapi_factory provides default responses shared across testcases.
+ ZRR = zapi_responses()
+ with pytest.raises(ImportError) as exc:
+ zapi = ZRR['empty']
+ print("ZAPI", zapi)
+ msg = 'build_zapi_response: netapp-lib is missing'
+ assert msg == exc.value.args[0]
+
+
+def test_register_responses():
+ get_version = build_zapi_response({})
+ with pytest.raises(ImportError) as exc:
+ uut.register_responses([
+ ('get-version', get_version),
+ ('get-bad-zapi', 'BAD_ZAPI')
+ ], 'test_register_responses')
+ msg = 'build_zapi_response: netapp-lib is missing'
+ assert msg == exc.value.args[0]
+
+
+def test_import_error_build_zapi_response():
+ zapi = build_zapi_response({})
+ expected = ('build_zapi_response: netapp-lib is missing', 'invalid')
+ assert expected == zapi
+
+
+def test_import_error_build_zapi_error():
+ zapi = build_zapi_error(12345, 'test')
+ expected = ('build_zapi_error: netapp-lib is missing', 'invalid')
+ assert expected == zapi
+
+
+class Module:
+ def __init__(self):
+ self.params = {
+ 'username': 'user',
+ 'password': 'pwd',
+ 'hostname': 'host',
+ 'use_rest': 'never',
+ 'cert_filepath': None,
+ 'key_filepath': None,
+ 'validate_certs': False,
+ 'http_port': None,
+ 'feature_flags': None,
+ }
+
+
+def test_fixture_no_netapp_lib(patch_fixture):
+ uut.register_responses([
+ ('GET', 'cluster', (200, {}, None))])
+ mock_sr = patch_fixture
+ cx = netapp_utils.OntapRestAPI(Module())
+ cx.send_request('GET', 'cluster', None)
+ assert 'test_fixture_no_netapp_lib' in uut._RESPONSES
+ assert 'test_fixture_no_netapp_lib' in uut._REQUESTS
+ uut.print_requests()
+ uut.print_requests_and_responses()
+ assert len(mock_sr.mock_calls) == 1
+ calls = uut.get_mock_record()
+ assert len([calls.get_requests()]) == 1
diff --git a/ansible_collections/netapp/ontap/tests/unit/framework/test_rest_factory.py b/ansible_collections/netapp/ontap/tests/unit/framework/test_rest_factory.py
new file mode 100644
index 000000000..f04dc4518
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/framework/test_rest_factory.py
@@ -0,0 +1,44 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module unit test helper rest_factory """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+
+def test_default_responses():
+ srr = rest_responses()
+ assert srr
+ assert 'is_zapi' in srr
+ assert srr['is_zapi'] == (400, {}, "Unreachable")
+
+
+def test_add_response():
+ srr = rest_responses(
+ {'is_zapi': (444, {'k': 'v'}, "Unknown")}
+ )
+ assert srr
+ assert 'is_zapi' in srr
+ assert srr['is_zapi'] == (444, {'k': 'v'}, "Unknown")
+
+
+def test_negative_add_response():
+ with pytest.raises(KeyError) as exc:
+ srr = rest_responses(
+ {'is_zapi': (444, {'k': 'v'}, "Unknown")}, allow_override=False
+ )
+ print(exc.value)
+ assert 'duplicated key: is_zapi' == exc.value.args[0]
+
+
+def test_negative_key_does_not_exist():
+ srr = rest_responses()
+ with pytest.raises(KeyError) as exc:
+ srr['bad_key']
+ print(exc.value)
+ msg = 'bad_key not registered, list of valid keys:'
+ assert msg in exc.value.args[0]
diff --git a/ansible_collections/netapp/ontap/tests/unit/framework/test_zapi_factory.py b/ansible_collections/netapp/ontap/tests/unit/framework/test_zapi_factory.py
new file mode 100644
index 000000000..c82fb6a01
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/framework/test_zapi_factory.py
@@ -0,0 +1,108 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module unit test helper zapi_factory """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.tests.unit.framework import zapi_factory as uut
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib') # pragma: no cover
+
+AGGR_INFO = {'num-records': 3,
+ 'attributes-list':
+ {'aggr-attributes':
+ {'aggregate-name': 'aggr_name',
+ 'aggr-raid-attributes': {
+ 'state': 'online',
+ 'disk-count': '4',
+ 'encrypt-with-aggr-key': 'true'},
+ 'aggr-snaplock-attributes': {'snaplock-type': 'snap_t'}}
+ },
+ }
+
+
+def test_build_zapi_response_empty():
+ empty, valid = uut.build_zapi_response({})
+ assert valid == 'valid'
+ print(empty.to_string())
+ assert empty.to_string() == b'<results status="passed"/>'
+
+
+def test_build_zapi_response_dict():
+ aggr_info, valid = uut.build_zapi_response(AGGR_INFO)
+ assert valid == 'valid'
+ print(aggr_info.to_string())
+ aggr_str = aggr_info.to_string()
+ assert b'<aggregate-name>aggr_name</aggregate-name>' in aggr_str
+ assert b'<aggr-snaplock-attributes><snaplock-type>snap_t</snaplock-type></aggr-snaplock-attributes>' in aggr_str
+ assert b'<results status="passed">' in aggr_str
+ assert b'<num-records>3</num-records>' in aggr_str
+
+
+def test_build_zapi_error():
+ zapi1, valid = uut.build_zapi_error('54321', 'error_text')
+ assert valid == 'valid'
+ zapi2, valid = uut.build_zapi_error(54321, 'error_text')
+ assert valid == 'valid'
+ assert zapi1.to_string() == zapi2.to_string()
+ print(zapi1.to_string())
+ assert zapi1.to_string() == b'<results errno="54321" reason="error_text"/>'
+
+
+def test_default_responses():
+ zrr = uut.zapi_responses()
+ assert zrr
+ assert 'empty' in zrr
+ print(zrr['empty'][0].to_string())
+ assert zrr['empty'][0].to_string() == uut.build_zapi_response({})[0].to_string()
+
+
+def test_add_response():
+ zrr = uut.zapi_responses(
+ {'empty': uut.build_zapi_response({'k': 'v'}, 1)}
+ )
+ assert zrr
+ assert 'empty' in zrr
+ print(zrr['empty'][0].to_string())
+ assert zrr['empty'][0].to_string() == uut.build_zapi_response({'k': 'v'}, 1)[0].to_string()
+
+
+def test_negative_add_response():
+ with pytest.raises(KeyError) as exc:
+ zrr = uut.zapi_responses(
+ {'empty': uut.build_zapi_response({})}, allow_override=False
+ )
+ print(exc.value)
+ assert 'duplicated key: empty' == exc.value.args[0]
+
+
+def test_negative_add_default_error():
+ uut._DEFAULT_ERRORS['empty'] = uut.build_zapi_error(12345, 'hello')
+ with pytest.raises(KeyError) as exc:
+ zrr = uut.zapi_responses(allow_override=False)
+ print(exc.value)
+ assert 'duplicated key: empty' == exc.value.args[0]
+ del uut._DEFAULT_ERRORS['empty']
+
+
+def test_negative_add_error():
+ with pytest.raises(KeyError) as exc:
+ zrr = uut.zapi_responses(
+ {'empty': uut.build_zapi_error(12345, 'hello')}, allow_override=False
+ )
+ print(exc.value)
+ assert 'duplicated key: empty' == exc.value.args[0]
+
+
+def test_negative_key_does_not_exist():
+ zrr = uut.zapi_responses()
+ with pytest.raises(KeyError) as exc:
+ zrr['bad_key']
+ print(exc.value)
+ msg = 'bad_key not registered, list of valid keys:'
+ assert msg in exc.value.args[0]
diff --git a/ansible_collections/netapp/ontap/tests/unit/framework/ut_utilities.py b/ansible_collections/netapp/ontap/tests/unit/framework/ut_utilities.py
new file mode 100644
index 000000000..ac770604a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/framework/ut_utilities.py
@@ -0,0 +1,31 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# Author: Laurent Nicolas, laurentn@netapp.com
+
+""" unit tests for Ansible modules for ONTAP:
+ shared utility functions
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+
+
+def is_indexerror_exception_formatted():
+ """ some versions of python do not format IndexError exception properly
+ the error message is not reported in str() or repr()
+ We see this for older versions of Ansible, where the python version is frozen
+ - fails on 3.5.7 but works on 3.5.10
+ - fails on 3.6.8 but works on 3.6.9
+ - fails on 3.7.4 but works on 3.7.5
+ - fails on 3.8.0 but works on 3.8.1
+ """
+ return (
+ sys.version_info[:2] == (2, 7)
+ or (sys.version_info[:2] == (3, 5) and sys.version_info[:3] > (3, 5, 7))
+ or (sys.version_info[:2] == (3, 6) and sys.version_info[:3] > (3, 6, 8))
+ or (sys.version_info[:2] == (3, 7) and sys.version_info[:3] > (3, 7, 4))
+ or (sys.version_info[:2] == (3, 8) and sys.version_info[:3] > (3, 8, 0))
+ or sys.version_info[:2] >= (3, 9)
+ )
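+
+
+# A minimal usage sketch (assumption, mirroring the filter tests in this collection):
+# gate assertions on exception text so they are skipped on python builds that do not
+# report the message:
+#     if is_indexerror_exception_formatted():
+#         assert 'expected text' in str(exc)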
diff --git a/ansible_collections/netapp/ontap/tests/unit/framework/zapi_factory.py b/ansible_collections/netapp/ontap/tests/unit/framework/zapi_factory.py
new file mode 100644
index 000000000..5f23fbad0
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/framework/zapi_factory.py
@@ -0,0 +1,148 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+# Author: Laurent Nicolas, laurentn@netapp.com
+
+""" unit tests for Ansible modules for ONTAP:
+ utility to build ZAPI responses and errors, and register them for use in testcases.
+
+ 1) at the module level, define the ZAPI responses:
+ ZRR = zapi_responses() if you're only interested in the default ones: 'empty', 'error', ...
+ or
+ ZRR = zapi_responses(dict) to use the default ones and augment or override them:
+ each key identifies a response name, and the value is a response built with build_zapi_response or build_zapi_error.
+
+ 2) create a ZAPI XML response or error using
+ build_zapi_response(contents, num_records=None)
+ build_zapi_error(errno, reason)
+
+ Typically, these will be used with zapi_responses as
+
+ ZRR = zapi_responses({
+ 'aggr_info': build_zapi_response(aggr_info),
+ 'object_store_info': build_zapi_response(object_store_info),
+ 'disk_info': build_zapi_response(disk_info),
+ })
+
+ 3) in each test function, register a list of (api_name, response) tuples, using entries from zapi_responses (and rest_responses)
+ def test_create(self):
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-create', ZRR['empty']),
+ ('aggr-get-iter', ZRR['empty']),
+ ])
+
+ See ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate.py
+ for an example.
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+# name: (dict, num_records)
+# dict is translated into an xml structure, num_records is None or an integer >= 0
+_DEFAULT_RESPONSES = {
+ 'empty': ({}, None),
+ 'success': ({}, None),
+ 'no_records': ({'num-records': '0'}, None),
+ 'one_record_no_data': ({'num-records': '1'}, None),
+ 'version': ({'version': 'zapi_version'}, None),
+ 'cserver': ({
+ 'attributes-list': {
+ 'vserver-info': {
+ 'vserver-name': 'cserver'
+ }
+ }}, 1),
+}
+# name: (errno, reason)
+# errno as int, reason as str
+_DEFAULT_ERRORS = {
+ 'error': (12345, 'synthetic error for UT purpose'),
+ 'error_missing_api': (13005, 'Unable to find API: xxxx on data vserver')
+}
+
+
+def get_error_desc(error_code):
+ return next((err_desc for err_num, err_desc in _DEFAULT_ERRORS.values() if err_num == error_code),
+ 'no registered error for %d' % error_code)
+
+
+def zapi_error_message(error, error_code=12345, reason=None, addal=None):
+ if reason is None:
+ reason = get_error_desc(error_code)
+ msg = "%s: NetApp API failed. Reason - %s:%s" % (error, error_code, reason)
+ if addal:
+ msg += addal
+ return msg
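+
+# Illustrative example based on the defaults above: with the default error_code 12345,
+# zapi_error_message('Error creating aggregate aggr1') returns
+# "Error creating aggregate aggr1: NetApp API failed. Reason - 12345:synthetic error for UT purpose"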
+
+
+def build_raw_xml_response(contents, num_records=None, force_dummy=False):
+ top_contents = {'results': contents}
+ xml, valid = build_zapi_response(top_contents)
+ if valid == 'valid' and not force_dummy:
+ return xml.to_string()
+ return b'<xml><results status="netapp-lib is missing"/></xml>'
+
+
+def build_zapi_response(contents, num_records=None):
+ ''' build an XML response
+ contents is translated into an xml structure
+ num_records is None or an integer >= 0
+ '''
+ if not netapp_utils.has_netapp_lib():
+ # do not report an error at init, as it breaks ansible-test checks
+ return 'build_zapi_response: netapp-lib is missing', 'invalid'
+ if num_records is not None:
+ contents['num-records'] = str(num_records)
+ response = netapp_utils.zapi.NaElement('results')
+ response.translate_struct(contents)
+ response.add_attr('status', 'passed')
+ return (response, 'valid')
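+
+# Illustrative example, mirroring test_zapi_factory.py: with netapp-lib installed,
+# build_zapi_response({}) returns (NaElement, 'valid') where the element serializes to
+# b'<results status="passed"/>'; num_records=1 additionally adds <num-records>1</num-records>.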
+
+
+def build_zapi_error(errno, reason):
+ ''' build an XML response
+ errno as int
+ reason as str
+ '''
+ if not netapp_utils.has_netapp_lib():
+ return 'build_zapi_error: netapp-lib is missing', 'invalid'
+ response = netapp_utils.zapi.NaElement('results')
+ response.add_attr('errno', str(errno))
+ response.add_attr('reason', reason)
+ return (response, 'valid')
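+
+# Illustrative example, mirroring test_zapi_factory.py: build_zapi_error(12345, 'error_text')
+# returns an NaElement serializing to b'<results errno="12345" reason="error_text"/>'.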
+
+
+class zapi_responses:
+
+ def __init__(self, adict=None, allow_override=True):
+ self.responses = {}
+ for key, value in _DEFAULT_RESPONSES.items():
+ self.responses[key] = build_zapi_response(*value)
+ for key, value in _DEFAULT_ERRORS.items():
+ if key in self.responses:
+ raise KeyError('duplicated key: %s' % key)
+ self.responses[key] = build_zapi_error(*value)
+ if adict:
+ for key, value in adict.items():
+ if not allow_override and key in self.responses:
+ raise KeyError('duplicated key: %s' % key)
+ self.responses[key] = value
+
+ def _get_response(self, name):
+ try:
+ value, valid = self.responses[name]
+ # sanity checks for netapp-lib are deferred until the test is actually run
+ if valid != 'valid':
+ print("Error: Defer any runtime dereference, eg ZRR['key'], until runtime or protect dereference under has_netapp_lib().")
+ raise ImportError(value)
+ return value, valid
+ except KeyError:
+ raise KeyError('%s not registered, list of valid keys: %s' % (name, self.responses.keys()))
+
+ def __getitem__(self, name):
+ return self._get_response(name)
+
+ def __contains__(self, name):
+ return name in self.responses
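+
+
+# Typical usage in a test module (a minimal sketch; aggr_info is a placeholder dict and
+# register_responses comes from mock_rest_and_zapi_requests):
+#     ZRR = zapi_responses({'aggr_info': build_zapi_response(aggr_info)})
+#     register_responses([('aggr-get-iter', ZRR['aggr_info'])])
+# When netapp-lib is missing, dereferencing ZRR['aggr_info'] raises ImportError at test time.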
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/filter/test_na_filter_iso8601.py b/ansible_collections/netapp/ontap/tests/unit/plugins/filter/test_na_filter_iso8601.py
new file mode 100644
index 000000000..7bdb37191
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/filter/test_na_filter_iso8601.py
@@ -0,0 +1,66 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for iso8601 filter """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.errors import AnsibleFilterError
+from ansible_collections.netapp.ontap.plugins.filter import na_filter_iso8601
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+from ansible_collections.netapp.ontap.tests.unit.framework import ut_utilities
+
+if na_filter_iso8601.IMPORT_ERROR and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as isodate is not available')
+
+ISO_DURATION = 'P689DT13H57M44S'
+ISO_DURATION_WEEKS = 'P98W'
+SECONDS_DURATION = 59579864
+
+
+def test_class_filter():
+ my_obj = na_filter_iso8601.FilterModule()
+ assert len(my_obj.filters()) == 2
+
+
+def test_iso8601_duration_to_seconds():
+ my_obj = na_filter_iso8601.FilterModule()
+ assert my_obj.filters()['iso8601_duration_to_seconds'](ISO_DURATION) == SECONDS_DURATION
+
+
+def test_negative_iso8601_duration_to_seconds():
+ my_obj = na_filter_iso8601.FilterModule()
+ with pytest.raises(AnsibleFilterError) as exc:
+ my_obj.filters()['iso8601_duration_to_seconds']('BAD_DATE')
+ print('EXC', exc)
+ # exception is not properly formatted with older 3.x versions, assuming same issue as for IndexError
+ if ut_utilities.is_indexerror_exception_formatted():
+ assert 'BAD_DATE' in str(exc)
+
+
+def test_iso8601_duration_from_seconds():
+ my_obj = na_filter_iso8601.FilterModule()
+ assert my_obj.filters()['iso8601_duration_from_seconds'](SECONDS_DURATION) == ISO_DURATION
+
+
+def test_negative_iso8601_duration_from_seconds_str():
+ my_obj = na_filter_iso8601.FilterModule()
+ with pytest.raises(AnsibleFilterError) as exc:
+ my_obj.filters()['iso8601_duration_from_seconds']('BAD_INT')
+ print('EXC', exc)
+ if ut_utilities.is_indexerror_exception_formatted():
+ assert 'BAD_INT' in str(exc)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.filter.na_filter_iso8601.IMPORT_ERROR', 'import failed')
+def test_negative_check_for_import():
+ my_obj = na_filter_iso8601.FilterModule()
+ with pytest.raises(AnsibleFilterError) as exc:
+ my_obj.filters()['iso8601_duration_to_seconds'](ISO_DURATION)
+ print('EXC', exc)
+ if ut_utilities.is_indexerror_exception_formatted():
+ assert 'import failed' in str(exc)
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/ansible_mocks.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/ansible_mocks.py
new file mode 100644
index 000000000..85c0bc1b2
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/ansible_mocks.py
@@ -0,0 +1,181 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import copy
+import json
+import pytest
+from ansible.module_utils import basic
+from ansible.module_utils._text import to_bytes
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+from ansible_collections.netapp.ontap.plugins.module_utils.netapp import ZAPI_DEPRECATION_MESSAGE
+
+VERBOSE = True
+
+
+def set_module_args(args):
+ """prepare arguments so that they will be picked up during module creation"""
+ args = json.dumps({'ANSIBLE_MODULE_ARGS': args})
+ basic._ANSIBLE_ARGS = to_bytes(args) # pylint: disable=protected-access
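+
+# Illustrative: call set_module_args({'hostname': 'test', ...}) before instantiating an AnsibleModule,
+# so the options are read from basic._ANSIBLE_ARGS instead of stdin.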
+
+
+class AnsibleExitJson(Exception):
+ """Exception class to be raised by module.exit_json and caught by the test case"""
+
+
+class AnsibleFailJson(Exception):
+ """Exception class to be raised by module.fail_json and caught by the test case"""
+
+
+def exit_json(*args, **kwargs): # pylint: disable=unused-argument
+ """function to patch over exit_json; package return data into an exception"""
+ if 'changed' not in kwargs:
+ kwargs['changed'] = False
+ raise AnsibleExitJson(kwargs)
+
+
+def fail_json(*args, **kwargs): # pylint: disable=unused-argument
+ """function to patch over fail_json; package return data into an exception"""
+ kwargs['failed'] = True
+ raise AnsibleFailJson(kwargs)
+
+
+WARNINGS = []
+
+
+def warn(dummy, msg):
+ print('WARNING:', msg)
+ WARNINGS.append(msg)
+
+
+def expect_and_capture_ansible_exception(function, exception, *args, **kwargs):
+ ''' wraps a call to a function in a pytest.raises context and returns the exception data as a dict
+
+ function: the function to call -- without ()
+ exception: 'exit' or 'fail' to trap Ansible exceptions raised by exit_json or fail_json
+ can also take an exception to test some corner cases (eg KeyError)
+ *args, **kwargs to capture any function arguments
+ '''
+ if exception in ('fail', 'exit'):
+ exception = AnsibleFailJson if exception == 'fail' else AnsibleExitJson
+ if not (isinstance(exception, type) and issubclass(exception, Exception)):
+ raise KeyError('Error: got: %s, expecting fail, exit, or some exception' % exception)
+ with pytest.raises(exception) as exc:
+ function(*args, **kwargs)
+ if VERBOSE:
+ print('EXC:', exception, exc.value)
+ if exception in (AnsibleExitJson, AnsibleFailJson, Exception, AttributeError, KeyError, TypeError, ValueError):
+ return exc.value.args[0]
+ return exc
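+
+# Illustrative call, taken from test_netapp.py in this collection:
+#     error = expect_and_capture_ansible_exception(netapp_utils.has_feature, 'fail', module, flag)['msg']
+# the returned dict holds the kwargs passed to fail_json (or exit_json when 'exit' is used).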
+
+
+def call_main(my_main, default_args=None, module_args=None, fail=False):
+ ''' utility function to call a module main() entry point
+ my_main: main function for a module
+ default_args: a dict for the Ansible options - in general, what is accepted by all tests
+ module_args: additional options - in general what is specific to a test
+
+ calls main, which should raise AnsibleExitJson or AnsibleFailJson
+ '''
+ args = copy.deepcopy(default_args) if default_args else {}
+ if module_args:
+ args.update(module_args)
+ set_module_args(args)
+ return expect_and_capture_ansible_exception(my_main, 'fail' if fail else 'exit')
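+
+# Illustrative call (a sketch; my_module and the option dicts are placeholders):
+#     exit_dict = call_main(my_module.main, DEFAULT_ARGS, {'state': 'absent'})
+#     fail_dict = call_main(my_module.main, DEFAULT_ARGS, fail=True)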
+
+
+def create_module(my_module, default_args=None, module_args=None, check_mode=None, fail=False):
+ ''' utility function to create a module object
+ my_module: a class that represent an ONTAP Ansible module
+ default_args: a dict for the Ansible options - in general, what is accepted by all tests
+ module_args: additional options - in general what is specific to a test
+ check_mode: True/False - if not None, check_mode is set accordingly
+
+ returns an instance of the module
+ '''
+ args = copy.deepcopy(default_args) if default_args else {}
+ if module_args:
+ args.update(module_args)
+ set_module_args(args)
+ if fail:
+ return expect_and_capture_ansible_exception(my_module, 'fail')
+ my_module_object = my_module()
+ if check_mode is not None:
+ my_module_object.module.check_mode = check_mode
+ return my_module_object
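+
+# Illustrative call, following the pattern used in test_netapp.py (MockONTAPModule is defined by the caller):
+#     module = create_module(MockONTAPModule, default_args, module_args).module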
+
+
+def create_and_apply(my_module, default_args=None, module_args=None, fail=False, check_mode=None):
+ ''' utility function to create a module and call apply
+
+ calls create_module, then calls the apply function and checks for:
+ AnsibleExitJson exception if fail is False or not present.
+ AnsibleFailJson exception if fail is True.
+
+ see create_module for a description of the other arguments.
+ '''
+ try:
+ my_obj = create_module(my_module, default_args, module_args, check_mode)
+ except Exception as exc:
+ print('Unexpected exception returned in create_module: %s' % exc)
+ print('If expected, use create_module with fail=True.')
+ raise
+ return expect_and_capture_ansible_exception(my_obj.apply, 'fail' if fail else 'exit')
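+
+# Illustrative call (a sketch; MyModule and the options are placeholders):
+#     assert create_and_apply(MyModule, DEFAULT_ARGS, {'state': 'absent'})['changed']
+#     error = create_and_apply(MyModule, DEFAULT_ARGS, fail=True)['msg']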
+
+
+# using pytest natively, without unittest.TestCase
+@pytest.fixture(autouse=True)
+def patch_ansible():
+ with patch.multiple(basic.AnsibleModule,
+ exit_json=exit_json,
+ fail_json=fail_json,
+ warn=warn) as mocks:
+ clear_warnings()
+ # so that we get a SystemExit: 1 error (not able to read from stdin in ansible-test!)
+ # if set_module_args() was not called
+ basic._ANSIBLE_ARGS = None
+ yield mocks
+
+
+def get_warnings():
+ return WARNINGS
+
+
+def print_warnings(framed=True):
+ if framed:
+ sep = '-' * 7
+ title = ' WARNINGS '
+ print(sep, title, sep)
+ for warning in WARNINGS:
+ print(warning)
+ if framed:
+ sep = '-' * (7 * 2 + len(title))
+ print(sep)
+
+
+def assert_no_warnings():
+ assert not WARNINGS
+
+
+def assert_no_warnings_except_zapi():
+ # The deprecation message can appear more than once; list.remove() only removes the first instance.
+ local_warning = list(set(WARNINGS))
+ tmp_warnings = local_warning[:]
+ for warning in tmp_warnings:
+ if warning in ZAPI_DEPRECATION_MESSAGE:
+ local_warning.remove(ZAPI_DEPRECATION_MESSAGE)
+ assert not local_warning
+
+
+def assert_warning_was_raised(warning, partial_match=False):
+ if partial_match:
+ assert any(warning in msg for msg in WARNINGS)
+ else:
+ assert warning in WARNINGS
+
+
+def clear_warnings():
+ global WARNINGS
+ WARNINGS = []
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp.py
new file mode 100644
index 000000000..a96f08dea
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp.py
@@ -0,0 +1,182 @@
+# Copyright (c) 2018-2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils netapp.py - general features '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+from ansible.module_utils import basic
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ patch_ansible, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'cert_filepath': None,
+ 'key_filepath': None,
+}
+
+CERT_ARGS = {
+ 'hostname': 'test',
+ 'cert_filepath': 'test_pem.pem',
+ 'key_filepath': 'test_key.key'
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_ontap_module(default_args=None, module_args=None):
+ return create_module(MockONTAPModule, default_args, module_args).module
+
+
+def test_has_feature_success_default():
+ ''' existing feature_flag with default '''
+ flag = 'deprecation_warning'
+ module = create_ontap_module(DEFAULT_ARGS)
+ assert netapp_utils.has_feature(module, flag)
+
+
+def test_has_feature_success_user_true():
+ ''' existing feature_flag with value set to True '''
+ flag = 'user_deprecation_warning'
+ module_args = {'feature_flags': {flag: True}}
+ module = create_ontap_module(DEFAULT_ARGS, module_args)
+ assert netapp_utils.has_feature(module, flag)
+
+
+def test_has_feature_success_user_false():
+ ''' existing feature_flag with value set to False '''
+ flag = 'user_deprecation_warning'
+ module_args = {'feature_flags': {flag: False}}
+ module = create_ontap_module(DEFAULT_ARGS, module_args)
+ assert not netapp_utils.has_feature(module, flag)
+
+
+def test_has_feature_invalid_key():
+ ''' existing feature_flag with unknown key '''
+ flag = 'deprecation_warning_bad_key'
+ module = create_ontap_module(DEFAULT_ARGS)
+ msg = 'Internal error: unexpected feature flag: %s' % flag
+ assert expect_and_capture_ansible_exception(netapp_utils.has_feature, 'fail', module, flag)['msg'] == msg
+
+
+def test_has_feature_invalid_bool():
+ ''' existing feature_flag with non boolean value '''
+ flag = 'user_deprecation_warning'
+ module_args = {'feature_flags': {flag: 'non bool'}}
+ module = create_ontap_module(DEFAULT_ARGS, module_args)
+ msg = 'Error: expected bool type for feature flag: %s' % flag
+ assert expect_and_capture_ansible_exception(netapp_utils.has_feature, 'fail', module, flag)['msg'] == msg
+
+
+def test_get_na_ontap_host_argument_spec_peer():
+ ''' validate that the spec entries have no default key, and that feature_flags is not an option '''
+ spec = netapp_utils.na_ontap_host_argument_spec_peer()
+ for key in ('username', 'https'):
+ assert key in spec
+ assert 'feature_flags' not in spec
+ for entry in spec.values():
+ assert 'type' in entry
+ assert 'default' not in entry
+
+
+def test_setup_host_options_from_module_params_from_empty():
+ ''' make sure module.params options are reflected in host_options '''
+ module = create_ontap_module(DEFAULT_ARGS)
+ host_options = {}
+ keys = ('hostname', 'username')
+ netapp_utils.setup_host_options_from_module_params(host_options, module, keys)
+ # we gave 2 keys
+ assert len(host_options) == 2
+ for key in keys:
+ assert host_options[key] == DEFAULT_ARGS[key]
+
+
+def test_setup_host_options_from_module_params_username_not_set_when_cert_present():
+ ''' make sure username is not copied to host_options when a cert is already set '''
+ module = create_ontap_module(DEFAULT_ARGS)
+ host_options = dict(cert_filepath='some_path')
+ unchanged_keys = tuple(host_options.keys())
+ copied_over_keys = ('hostname',)
+ ignored_keys = ('username',)
+ keys = unchanged_keys + copied_over_keys + ignored_keys
+ netapp_utils.setup_host_options_from_module_params(host_options, module, keys)
+ # we gave 2 keys
+ assert len(host_options) == 2
+ for key in ignored_keys:
+ assert key not in host_options
+ for key in copied_over_keys:
+ assert host_options[key] == DEFAULT_ARGS[key]
+ print(host_options)
+ for key in unchanged_keys:
+ assert host_options[key] != DEFAULT_ARGS[key]
+
+
+def test_setup_host_options_from_module_params_not_none_fields_are_preserved():
+ ''' make sure existing host_options values are not overridden by module.params '''
+ args = dict(DEFAULT_ARGS)
+ args['cert_filepath'] = 'some_path'
+ module = create_ontap_module(args)
+ host_options = dict(cert_filepath='some_other_path')
+ unchanged_keys = tuple(host_options.keys())
+ copied_over_keys = ('hostname',)
+ ignored_keys = ('username',)
+ keys = unchanged_keys + copied_over_keys + ignored_keys
+ netapp_utils.setup_host_options_from_module_params(host_options, module, keys)
+ # we gave 2 keys
+ assert len(host_options) == 2
+ for key in ignored_keys:
+ assert key not in host_options
+ for key in copied_over_keys:
+ assert host_options[key] == args[key]
+ print(host_options)
+ for key in unchanged_keys:
+ assert host_options[key] != args[key]
+
+
+def test_setup_host_options_from_module_params_cert_not_set_when_username_present():
+ ''' make sure cert_filepath is not copied to host_options when username is already set '''
+ args = dict(DEFAULT_ARGS)
+ args['cert_filepath'] = 'some_path'
+ module = create_ontap_module(args)
+ host_options = dict(username='some_name')
+ unchanged_keys = tuple(host_options.keys())
+ copied_over_keys = ('hostname',)
+ ignored_keys = ('cert_filepath',)
+ keys = unchanged_keys + copied_over_keys + ignored_keys
+ netapp_utils.setup_host_options_from_module_params(host_options, module, keys)
+ # we gave 2 keys
+ assert len(host_options) == 2
+ for key in ignored_keys:
+ assert key not in host_options
+ for key in copied_over_keys:
+ assert host_options[key] == args[key]
+ print(host_options)
+ for key in unchanged_keys:
+ assert host_options[key] != args[key]
+
+
+def test_setup_host_options_from_module_params_conflict():
+ ''' make sure conflicting authentication options are rejected '''
+ module = create_ontap_module(DEFAULT_ARGS)
+ host_options = dict(username='some_name', key_filepath='not allowed')
+ msg = 'Error: host cannot have both basic authentication (username/password) and certificate authentication (cert/key files).'
+ assert expect_and_capture_ansible_exception(netapp_utils.setup_host_options_from_module_params,
+ 'fail', host_options, module, host_options.keys())['msg'] == msg
+
+
+def test_set_auth_method():
+ args = {'hostname': None}
+ # neither password nor cert
+ error = expect_and_capture_ansible_exception(netapp_utils.set_auth_method, 'fail', create_ontap_module(args), None, None, None, None)['msg']
+ assert 'Error: ONTAP module requires username/password or SSL certificate file(s)' in error
+ # keyfile but no cert
+ error = expect_and_capture_ansible_exception(netapp_utils.set_auth_method, 'fail', create_ontap_module(args), None, None, None, 'keyfile')['msg']
+ assert 'Error: cannot have a key file without a cert file' in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_invoke_elem.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_invoke_elem.py
new file mode 100644
index 000000000..a185fcb2d
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_invoke_elem.py
@@ -0,0 +1,154 @@
+# Copyright (c) 2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+"""unit tests for module_utils netapp.py - ZAPI invoke_elem
+
+ We cannot use the general UT framework as it patches invoke_elem
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ patch_ansible, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_raw_xml_response, zapi_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip("skipping as missing required netapp_lib")
+
+ZRR = zapi_responses({
+})
+
+
+class MockModule:
+ def __init__(self):
+ self._name = 'testme'
+
+
+class MockOpener:
+ def __init__(self, response=None, exception=None):
+ self.response = response
+ self.exception = exception
+ self.timeout = -1
+
+ def open(self, request, timeout=None):
+ self.timeout = timeout
+ if self.exception:
+ raise self.exception
+ return self.response
+
+
+class MockResponse:
+ def __init__(self, contents, force_dummy=False):
+ self.response = build_raw_xml_response(contents, force_dummy=force_dummy)
+ print('RESPONSE', self.response)
+
+ def read(self):
+ return self.response
+
+
+def create_ontapzapicx_object():
+ return netapp_utils.OntapZAPICx(module=MockModule())
+
+
+def test_error_invalid_naelement():
+ ''' should fail when NaElement is None, empty, or not of type NaElement '''
+ zapi_cx = create_ontapzapicx_object()
+ assert str(expect_and_capture_ansible_exception(zapi_cx.invoke_elem, ValueError, {})) ==\
+ 'NaElement must be supplied to invoke API'
+ assert str(expect_and_capture_ansible_exception(zapi_cx.invoke_elem, ValueError, {'x': 'yz'})) ==\
+ 'NaElement must be supplied to invoke API'
+
+
+def test_exception_with_opener_generic_exception():
+ zapi_cx = create_ontapzapicx_object()
+ zapi_cx._refresh_conn = False
+ zapi_cx._opener = MockOpener(exception=KeyError('testing'))
+ exc = expect_and_capture_ansible_exception(zapi_cx.invoke_elem, netapp_utils.zapi.NaApiError, ZRR['success'][0])
+ # KeyError('testing') in 3.x but KeyError('testing',) with 2.7
+ assert str(exc.value).startswith("NetApp API failed. Reason - Unexpected error:KeyError('testing'")
+
+
+def test_exception_with_opener_httperror():
+ if not hasattr(netapp_utils.zapi.urllib.error.HTTPError, 'reason'):
+ # skip the test in 2.6 as netapp_lib is not fully supported
+ # HTTPError does not support reason, and it's not worth changing the code
+ # raise zapi.NaApiError(exc.code, exc.reason)
+ # AttributeError: 'HTTPError' object has no attribute 'reason'
+ pytest.skip('this test requires HTTPError.reason which is not available in python 2.6')
+ zapi_cx = create_ontapzapicx_object()
+ zapi_cx._refresh_conn = False
+ zapi_cx._opener = MockOpener(exception=netapp_utils.zapi.urllib.error.HTTPError('url', 400, 'testing', None, None))
+ exc = expect_and_capture_ansible_exception(zapi_cx.invoke_elem, netapp_utils.zapi.NaApiError, ZRR['success'][0])
+ assert str(exc.value) == 'NetApp API failed. Reason - 400:testing'
+
+
+def test_exception_with_opener_urlerror():
+ # ConnectionRefusedError is not defined in 2.7
+ connection_error = ConnectionRefusedError('UT') if sys.version_info >= (3, 0) else 'connection_error'
+ zapi_cx = create_ontapzapicx_object()
+ zapi_cx._refresh_conn = False
+ zapi_cx._opener = MockOpener(exception=netapp_utils.zapi.urllib.error.URLError(connection_error))
+ exc = expect_and_capture_ansible_exception(zapi_cx.invoke_elem, netapp_utils.zapi.NaApiError, ZRR['success'][0])
+ # skip the assert for 2.7
+ # ConnectionRefusedError('UT'), with 3.x but ConnectionRefusedError('UT',), with 3.5
+ assert str(exc.value).startswith("NetApp API failed. Reason - Unable to connect:(ConnectionRefusedError('UT'") or sys.version_info < (3, 0)
+
+ zapi_cx._opener = MockOpener(exception=netapp_utils.zapi.urllib.error.URLError('connection_error'))
+ exc = expect_and_capture_ansible_exception(zapi_cx.invoke_elem, netapp_utils.zapi.NaApiError, ZRR['success'][0])
+ # URLError('connection_error') with 3.x but URL error:URLError('connection_error',) with 2.7
+ assert str(exc.value).startswith("NetApp API failed. Reason - URL error:URLError('connection_error'")
+
+ # force an exception when reading exc.reason
+ exc = netapp_utils.zapi.urllib.error.URLError('connection_error')
+ delattr(exc, 'reason')
+ zapi_cx._opener = MockOpener(exception=exc)
+ exc = expect_and_capture_ansible_exception(zapi_cx.invoke_elem, netapp_utils.zapi.NaApiError, ZRR['success'][0])
+ # URLError('connection_error') with 3.x but URL error:URLError('connection_error',) with 2.7
+ assert str(exc.value).startswith("NetApp API failed. Reason - URL error:URLError('connection_error'")
+
+
+def test_response():
+ zapi_cx = create_ontapzapicx_object()
+ zapi_cx._refresh_conn = False
+ zapi_cx._timeout = 10
+ zapi_cx._trace = True
+ zapi_cx._opener = MockOpener(MockResponse({}))
+ response = zapi_cx.invoke_elem(ZRR['success'][0])
+ print(response)
+ assert response.to_string() == b'<results/>'
+ assert zapi_cx._opener.timeout == 10
+
+
+def test_response_no_netapp_lib():
+ zapi_cx = create_ontapzapicx_object()
+ zapi_cx._refresh_conn = False
+ zapi_cx._timeout = 10
+ zapi_cx._trace = True
+ zapi_cx._opener = MockOpener(MockResponse({}, True))
+ response = zapi_cx.invoke_elem(ZRR['success'][0])
+ print(response)
+ assert response.to_string() == b'<results status="netapp-lib is missing"/>'
+ assert zapi_cx._opener.timeout == 10
+
+
+def mock_build_opener(zapi_cx, opener):
+ def build_opener():
+ zapi_cx._opener = opener
+ return build_opener
+
+
+def test_response_build_opener():
+ zapi_cx = create_ontapzapicx_object()
+ zapi_cx._refresh_conn = False
+ zapi_cx._trace = True
+ zapi_cx._build_opener = mock_build_opener(zapi_cx, MockOpener(MockResponse({})))
+ response = zapi_cx.invoke_elem(ZRR['success'][0])
+ print(response)
+ assert response.to_string() == b'<results/>'
+ assert zapi_cx._opener.timeout is None
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_ipaddress.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_ipaddress.py
new file mode 100644
index 000000000..5d381d852
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_ipaddress.py
@@ -0,0 +1,95 @@
+# Copyright (c) 2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils netapp_ipaddress.py '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.module_utils import basic
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import expect_and_capture_ansible_exception, patch_ansible, create_module
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.plugins.module_utils import netapp_ipaddress
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+class MockONTAPModule(object):
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_ontap_module(args=None):
+ if args is None:
+ args = {'hostname': 'xxx'}
+ return create_module(MockONTAPModule, args)
+
+
+def test_check_ipaddress_is_present():
+ assert netapp_ipaddress._check_ipaddress_is_present(None) is None
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp_ipaddress.HAS_IPADDRESS_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ error = 'Error: the python ipaddress package is required for this module. Import error: None'
+ assert error in expect_and_capture_ansible_exception(netapp_ipaddress._check_ipaddress_is_present, 'fail', create_ontap_module().module)['msg']
+
+
+def test_validate_and_compress_ip_address():
+ module = create_ontap_module().module
+ valid_addresses = [
+ # IPv4
+ ['10.11.12.13', '10.11.12.13'],
+ # IPv6
+ ['1111:0123:0012:0001:abcd:0abc:9891:abcd', '1111:123:12:1:abcd:abc:9891:abcd'],
+ ['1111:0000:0000:0000:abcd:0abc:9891:abcd', '1111::abcd:abc:9891:abcd'],
+ ['1111:0000:0000:0012:abcd:0000:0000:abcd', '1111::12:abcd:0:0:abcd'],
+ ['ffff:ffff:0000:0000:0000:0000:0000:0000', 'ffff:ffff::'],
+ ]
+ for before, after in valid_addresses:
+ assert after == netapp_ipaddress.validate_and_compress_ip_address(before, module)
+
+
+def test_negative_validate_and_compress_ip_address():
+ module = create_ontap_module().module
+ invalid_addresses = [
+ # IPv4
+ ['10.11.12.345', 'Invalid IP address value 10.11.12.345'],
+ # IPv6
+ ['1111:0123:0012:0001:abcd:0abc:9891:abcg', 'Invalid IP address value'],
+ ['1111:0000:0000:0000:abcd:9891:abcd', 'Invalid IP address value'],
+ ['1111:::0012:abcd::abcd', 'Invalid IP address value'],
+ ]
+ for before, error in invalid_addresses:
+ assert error in expect_and_capture_ansible_exception(netapp_ipaddress.validate_and_compress_ip_address, 'fail', before, module)['msg']
+
+
+def test_netmask_to_len():
+ module = create_ontap_module().module
+ assert netapp_ipaddress.netmask_to_netmask_length('10.10.10.10', '255.255.0.0', module) == 16
+ assert netapp_ipaddress.netmask_to_netmask_length('1111::', 16, module) == 16
+ assert netapp_ipaddress.netmask_to_netmask_length('1111::', '16', module) == 16
+ error = 'Error: only prefix_len is supported for IPv6 addresses, got ffff::'
+ assert error in expect_and_capture_ansible_exception(netapp_ipaddress.netmask_to_netmask_length, 'fail', '1111::', 'ffff::', module)['msg']
+ error = 'Error: Invalid IP network value 10.11.12.13/abc.'
+ assert error in expect_and_capture_ansible_exception(netapp_ipaddress.netmask_to_netmask_length, 'fail', '10.11.12.13', 'abc', module)['msg']
+
+
+def test_len_to_netmask():
+ module = create_ontap_module().module
+ assert netapp_ipaddress.netmask_length_to_netmask('10.10.10.10', 16, module) == '255.255.0.0'
+ assert netapp_ipaddress.netmask_length_to_netmask('1111::', 16, module) == 'ffff::'
+
+
+def test_validate_ip_address_is_network_address():
+ module = create_ontap_module().module
+ assert netapp_ipaddress.validate_ip_address_is_network_address('10.11.12.0', module) is None
+ assert netapp_ipaddress.validate_ip_address_is_network_address('10.11.12.0/24', module) is None
+ error = 'Error: Invalid IP network value 10.11.12.0/21'
+ assert error in expect_and_capture_ansible_exception(netapp_ipaddress.validate_ip_address_is_network_address, 'fail', '10.11.12.0/21', module)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_module.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_module.py
new file mode 100644
index 000000000..55729d7cd
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_module.py
@@ -0,0 +1,885 @@
+# Copyright (c) 2018-2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for module_utils netapp_module.py """
+from __future__ import (absolute_import, division, print_function)
+import copy
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.module_utils import basic
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.plugins.module_utils.netapp_module import NetAppModule as na_helper, cmp as na_cmp
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_no_warnings, assert_warning_was_raised, clear_warnings, patch_ansible, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response
+from ansible_collections.netapp.ontap.tests.unit.framework import ut_utilities
+
+
+class MockONTAPModule(object):
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+ self.na_helper = na_helper(self.module)
+ self.na_helper.set_parameters(self.module.params)
+
+
+class MockONTAPModuleV2(object):
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+ self.na_helper = na_helper(self)
+ self.na_helper.set_parameters(self.module.params)
+
+
+def create_ontap_module(args=None, version=1):
+ if version == 2:
+ return create_module(MockONTAPModuleV2, args)
+ return create_module(MockONTAPModule, args)
+
+
+def test_get_cd_action_create():
+ """ validate cd_action for create """
+ current = None
+ desired = {'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_cd_action(current, desired)
+ assert result == 'create'
+
+
+def test_get_cd_action_delete():
+ """ validate cd_action for delete """
+ current = {'state': 'absent'}
+ desired = {'state': 'absent'}
+ my_obj = na_helper()
+ result = my_obj.get_cd_action(current, desired)
+ assert result == 'delete'
+
+
+def test_get_cd_action_already_exist():
+ """ validate cd_action for returning None """
+ current = {'state': 'whatever'}
+ desired = {'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_cd_action(current, desired)
+ assert result is None
+
+
+def test_get_cd_action_already_absent():
+ """ validate cd_action for returning None """
+ current = None
+ desired = {'state': 'absent'}
+ my_obj = na_helper()
+ result = my_obj.get_cd_action(current, desired)
+ assert result is None
+
+
+def test_get_modified_attributes_for_no_data():
+ """ validate modified attributes when current is None """
+ current = None
+ desired = {'name': 'test'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired)
+ assert result == {}
+
+
+def test_get_modified_attributes():
+ """ validate modified attributes """
+ current = {'name': ['test', 'abcd', 'xyz', 'pqr'], 'state': 'present'}
+ desired = {'name': ['abcd', 'abc', 'xyz', 'pqr'], 'state': 'absent'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired)
+ assert result == desired
+
+
+def test_get_modified_attributes_for_intersecting_mixed_list():
+ """ validate modified attributes for list diff """
+ current = {'name': [2, 'four', 'six', 8]}
+ desired = {'name': ['a', 8, 'ab', 'four', 'abcd']}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'name': ['a', 'ab', 'abcd']}
+
+
+def test_get_modified_attributes_for_intersecting_list():
+ """ validate modified attributes for list diff """
+ current = {'name': ['two', 'four', 'six', 'eight']}
+ desired = {'name': ['a', 'six', 'ab', 'four', 'abc']}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'name': ['a', 'ab', 'abc']}
+
+
+def test_get_modified_attributes_for_nonintersecting_list():
+ """ validate modified attributes for list diff """
+ current = {'name': ['two', 'four', 'six', 'eight']}
+ desired = {'name': ['a', 'ab', 'abd']}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'name': ['a', 'ab', 'abd']}
+
+
+def test_get_modified_attributes_for_list_of_dicts_no_data():
+ """ validate modified attributes for list diff """
+ current = None
+ desired = {'address_blocks': [{'start': '10.20.10.40', 'size': 5}]}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {}
+
+
+def test_get_modified_attributes_for_intersecting_list_of_dicts():
+ """ validate modified attributes for list diff """
+ current = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}]}
+ desired = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 5}]}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'address_blocks': [{'start': '10.20.10.40', 'size': 5}]}
+
+
+def test_get_modified_attributes_for_nonintersecting_list_of_dicts():
+ """ validate modified attributes for list diff """
+ current = {'address_blocks': [{'start': '10.10.10.23', 'size': 5}, {'start': '10.10.10.30', 'size': 5}]}
+ desired = {'address_blocks': [{'start': '10.20.10.23', 'size': 5}, {'start': '10.20.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 5}]}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'address_blocks': [{'start': '10.20.10.23', 'size': 5}, {'start': '10.20.10.30', 'size': 5}, {'start': '10.20.10.40', 'size': 5}]}
+
+
+def test_get_modified_attributes_for_list_diff():
+ """ validate modified attributes for list diff """
+ current = {'name': ['test', 'abcd'], 'state': 'present'}
+ desired = {'name': ['abcd', 'abc'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'name': ['abc']}
+
+
+def test_get_modified_attributes_for_no_change():
+ """ validate modified attributes for same data in current and desired """
+ current = {'name': 'test'}
+ desired = {'name': 'test'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired)
+ assert result == {}
+
+
+def test_get_modified_attributes_for_an_empty_desired_list():
+ """ validate modified attributes for an empty desired list """
+ current = {'snapmirror_label': ['daily', 'weekly', 'monthly'], 'state': 'present'}
+ desired = {'snapmirror_label': [], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired)
+ assert result == {'snapmirror_label': []}
+
+
+def test_get_modified_attributes_for_an_empty_desired_list_diff():
+ """ validate modified attributes for an empty desired list with diff"""
+ current = {'snapmirror_label': ['daily', 'weekly', 'monthly'], 'state': 'present'}
+ desired = {'snapmirror_label': [], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'snapmirror_label': []}
+
+
+def test_get_modified_attributes_for_an_empty_current_list():
+ """ validate modified attributes for an empty current list """
+ current = {'snapmirror_label': [], 'state': 'present'}
+ desired = {'snapmirror_label': ['daily', 'weekly', 'monthly'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired)
+ assert result == {'snapmirror_label': ['daily', 'weekly', 'monthly']}
+
+
+def test_get_modified_attributes_for_an_empty_current_list_diff():
+ """ validate modified attributes for an empty current list with diff"""
+ current = {'snapmirror_label': [], 'state': 'present'}
+ desired = {'snapmirror_label': ['daily', 'weekly', 'monthly'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'snapmirror_label': ['daily', 'weekly', 'monthly']}
+
+
+def test_get_modified_attributes_for_empty_lists():
+ """ validate modified attributes for empty lists """
+ current = {'snapmirror_label': [], 'state': 'present'}
+ desired = {'snapmirror_label': [], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired)
+ assert result == {}
+
+
+def test_get_modified_attributes_for_empty_lists_diff():
+ """ validate modified attributes for empty lists with diff """
+ current = {'snapmirror_label': [], 'state': 'present'}
+ desired = {'snapmirror_label': [], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {}
+
+
+def test_get_modified_attributes_equal_lists_with_duplicates():
+ """ validate modified attributes for equal lists with duplicates """
+ current = {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily'], 'state': 'present'}
+ desired = {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, False)
+ assert result == {}
+
+
+def test_get_modified_attributes_equal_lists_with_duplicates_diff():
+ """ validate modified attributes for equal lists with duplicates with diff """
+ current = {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily'], 'state': 'present'}
+ desired = {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {}
+
+
+def test_get_modified_attributes_for_current_list_with_duplicates():
+ """ validate modified attributes for current list with duplicates """
+ current = {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily'], 'state': 'present'}
+ desired = {'schedule': ['daily', 'daily', 'weekly', 'monthly'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, False)
+ assert result == {'schedule': ['daily', 'daily', 'weekly', 'monthly']}
+
+
+def test_get_modified_attributes_for_current_list_with_duplicates_diff():
+ """ validate modified attributes for current list with duplicates with diff """
+ current = {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily'], 'state': 'present'}
+ desired = {'schedule': ['daily', 'daily', 'weekly', 'monthly'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'schedule': []}
+
+
+def test_get_modified_attributes_for_desired_list_with_duplicates():
+ """ validate modified attributes for desired list with duplicates """
+ current = {'schedule': ['daily', 'weekly', 'monthly'], 'state': 'present'}
+ desired = {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, False)
+ assert result == {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily']}
+
+
+def test_get_modified_attributes_for_desired_list_with_duplicates_diff():
+ """ validate modified attributes for desired list with duplicates with diff """
+ current = {'schedule': ['daily', 'weekly', 'monthly'], 'state': 'present'}
+ desired = {'schedule': ['hourly', 'daily', 'daily', 'weekly', 'monthly', 'daily'], 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'schedule': ['hourly', 'daily', 'daily']}
+
+
+def test_get_modified_attributes_exceptions():
+ """ validate exceptions """
+ current = {'schedule': {'name': 'weekly'}, 'state': 'present'}
+ desired = {'schedule': 'weekly', 'state': 'present'}
+ my_obj = create_ontap_module({'hostname': None})
+ # mismatch in structure
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.get_modified_attributes, TypeError, current, desired)
+ assert "Expecting dict, got: weekly with current: {'name': 'weekly'}" in error
+ # mismatch in types
+ if sys.version_info[:2] > (3, 0):
+ # our cmp function raises an exception, but python 2.x has its own version.
+ desired = {'schedule': {'name': 12345}, 'state': 'present'}
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.get_modified_attributes, TypeError, current, desired)
+ assert ("unorderable types:" in error # 3.5
+ or "'>' not supported between instances of 'str' and 'int'" in error) # 3.9
+
+
+def test_get_modified_attributes_for_dicts():
+ """ validate modified attributes for dict of dicts """
+ current = {'schedule': {'name': 'weekly'}, 'state': 'present'}
+ desired = {'schedule': {'name': 'daily'}, 'state': 'present'}
+ my_obj = na_helper()
+ result = my_obj.get_modified_attributes(current, desired, True)
+ assert result == {'schedule': {'name': 'daily'}}
+
+
+def test_is_rename_action_for_empty_input():
+ """ validate rename action for input None """
+ source = None
+ target = None
+ my_obj = na_helper()
+ result = my_obj.is_rename_action(source, target)
+ assert result == source
+
+
+def test_is_rename_action_for_no_source():
+ """ validate rename action when source is None """
+ source = None
+ target = 'test2'
+ my_obj = na_helper()
+ result = my_obj.is_rename_action(source, target)
+ assert result is False
+
+
+def test_is_rename_action_for_no_target():
+ """ validate rename action when target is None """
+ source = 'test2'
+ target = None
+ my_obj = na_helper()
+ result = my_obj.is_rename_action(source, target)
+ assert result is True
+
+
+def test_is_rename_action():
+ """ validate rename action """
+ source = 'test'
+ target = 'test2'
+ my_obj = na_helper()
+ result = my_obj.is_rename_action(source, target)
+ assert result is False
+
+
+def test_required_is_not_set_to_none():
+ """ if a key is present, without a value, Ansible sets it to None """
+ my_obj = create_ontap_module({'hostname': None})
+ msg = 'hostname requires a value, got: None'
+ assert msg == expect_and_capture_ansible_exception(my_obj.na_helper.check_and_set_parameters, 'fail', my_obj.module)['msg']
+
+ # force a value different than None
+ my_obj.module.params['hostname'] = 1
+ my_params = my_obj.na_helper.check_and_set_parameters(my_obj.module)
+ assert set(my_params.keys()) == set(['hostname', 'https', 'validate_certs', 'use_rest'])
+
+
+def test_sanitize_wwn_no_action():
+ """ no change """
+ initiator = 'tEsT'
+ expected = initiator
+ my_obj = na_helper()
+ result = my_obj.sanitize_wwn(initiator)
+ assert result == expected
+
+
+def test_sanitize_wwn_no_action_valid_iscsi():
+ """ no change """
+ initiator = 'iqn.1995-08.com.eXaMpLe:StRiNg'
+ expected = initiator
+ my_obj = na_helper()
+ result = my_obj.sanitize_wwn(initiator)
+ assert result == expected
+
+
+def test_sanitize_wwn_no_action_valid_wwn():
+ """ no change """
+ initiator = '01:02:03:04:0A:0b:0C:0d'
+ expected = initiator.lower()
+ my_obj = na_helper()
+ result = my_obj.sanitize_wwn(initiator)
+ assert result == expected
+
+
+def test_filter_empty_dict():
+ """ empty dict return empty dict """
+ my_obj = na_helper()
+ arg = {}
+ result = my_obj.filter_out_none_entries(arg)
+ assert arg == result
+
+
+def test_filter_empty_list():
+ """ empty list return empty list """
+ my_obj = na_helper()
+ arg = []
+ result = my_obj.filter_out_none_entries(arg)
+ assert arg == result
+
+
+def test_filter_typeerror_on_none():
+ """ empty list return empty list """
+ my_obj = na_helper()
+ arg = None
+ with pytest.raises(TypeError) as exc:
+ my_obj.filter_out_none_entries(arg)
+ if sys.version_info[:2] < (3, 0):
+ # the assert fails on 2.x
+ return
+ msg = "unexpected type <class 'NoneType'>"
+ assert exc.value.args[0] == msg
+
+
+def test_filter_typeerror_on_str():
+ """ empty list return empty list """
+ my_obj = na_helper()
+ arg = ""
+ with pytest.raises(TypeError) as exc:
+ my_obj.filter_out_none_entries(arg)
+ if sys.version_info[:2] < (3, 0):
+ # the assert fails on 2.x
+ return
+ msg = "unexpected type <class 'str'>"
+ assert exc.value.args[0] == msg
+
+
+def test_filter_simple_dict():
+ """ simple dict return simple dict """
+ my_obj = na_helper()
+ arg = dict(a=None, b=1, c=None, d=2, e=3)
+ expected = dict(b=1, d=2, e=3)
+ result = my_obj.filter_out_none_entries(arg)
+ assert expected == result
+
+
+def test_filter_simple_list():
+ """ simple list return simple list """
+ my_obj = na_helper()
+ arg = [None, 2, 3, None, 5]
+ expected = [2, 3, 5]
+ result = my_obj.filter_out_none_entries(arg)
+ assert expected == result
+
+
+def test_filter_dict_dict():
+ """ simple dict return simple dict """
+ my_obj = na_helper()
+ arg = dict(a=None, b=dict(u=1, v=None, w=2), c={}, d=2, e=3)
+ expected = dict(b=dict(u=1, w=2), d=2, e=3)
+ result = my_obj.filter_out_none_entries(arg)
+ assert expected == result
+
+
+def test_filter_list_list():
+ """ simple list return simple list """
+ my_obj = na_helper()
+ arg = [None, [1, None, 3], 3, None, 5]
+ expected = [[1, 3], 3, 5]
+ result = my_obj.filter_out_none_entries(arg)
+ assert expected == result
+
+
+def test_filter_dict_list_dict():
+ """ simple dict return simple dict """
+ my_obj = na_helper()
+ arg = dict(a=None, b=[dict(u=1, v=None, w=2), 5, None, dict(x=6, y=None)], c={}, d=2, e=3)
+ expected = dict(b=[dict(u=1, w=2), 5, dict(x=6)], d=2, e=3)
+ result = my_obj.filter_out_none_entries(arg)
+ assert expected == result
+
+
+def test_filter_list_dict_list():
+ """ simple list return simple list """
+ my_obj = na_helper()
+ arg = [None, [1, None, 3], dict(a=None, b=[7, None, 9], c=None, d=dict(u=None, v=10)), None, 5]
+ expected = [[1, 3], dict(b=[7, 9], d=dict(v=10)), 5]
+ result = my_obj.filter_out_none_entries(arg)
+ assert expected == result
+
+
+def test_convert_value():
+ """ positive tests """
+ my_obj = na_helper()
+ for value, convert_to, expected in [
+ ('any', None, 'any'),
+ (12345, None, 12345),
+ ('12345', int, 12345),
+ ('any', str, 'any'),
+ ('true', bool, True),
+ ('false', bool, False),
+ ('online', 'bool_online', True),
+ ('any', 'bool_online', False),
+ ]:
+ result, error = my_obj.convert_value(value, convert_to)
+ assert error is None
+ assert expected == result
+
+
+def test_convert_value_with_error():
+ """ negative tests """
+ my_obj = na_helper()
+ for value, convert_to, expected in [
+ (12345, 'any', "Unexpected type:"),
+ ('any', int, "Unexpected value for int: any"),
+ ('any', bool, "Unexpected value: any received from ZAPI for boolean attribute"),
+ ]:
+ result, error = my_obj.convert_value(value, convert_to)
+ print(value, convert_to, result, '"%s"' % expected, '"%s"' % error)
+ assert result is None
+ assert expected in error
+
+
+def test_convert_value_with_exception():
+ """ negative tests """
+ my_obj = create_ontap_module({'hostname': None})
+ expect_and_capture_ansible_exception(my_obj.na_helper.convert_value, 'fail', 'any', 'any')
+
+
+def get_zapi_info():
+ return {
+ 'a': {'b': '12345', 'bad_stuff': ['a', 'b'], 'none_stuff': None}
+ }
+
+
+def get_zapi_na_element(zapi_info):
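+ # build_zapi_response comes from the shared unit test framework; judging from the
+ # assertions below, it wraps the dict into a ZAPI NaElement (roughly
+ # <results status="passed"><a><b>12345</b>...</a></results>) and returns a reason
+ # string instead of 'valid' when netapp-lib is not available.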
+ na_element, valid = build_zapi_response(zapi_info)
+ if valid != 'valid' and sys.version_info[:2] < (2, 7):
+ pytest.skip('Skipping Unit Tests on 2.6 as netapp-lib is not available')
+ assert valid == 'valid'
+ return na_element
+
+
+def test_zapi_get_value():
+ na_element = get_zapi_na_element(get_zapi_info())
+ my_obj = na_helper()
+ assert my_obj.zapi_get_value(na_element, ['a', 'b'], convert_to=int) == 12345
+ # missing key returns None if sparse dict is allowed (default)
+ assert my_obj.zapi_get_value(na_element, ['a', 'c'], convert_to=int) is None
+ # missing key returns 'default' - note, no conversion - if sparse dict is allowed (default)
+ assert my_obj.zapi_get_value(na_element, ['a', 'c'], convert_to=int, default='default') == 'default'
+
+
+def test_zapi_get_value_with_exception():
+ na_element = get_zapi_na_element(get_zapi_info())
+ my_obj = create_ontap_module({'hostname': None})
+ # KeyError
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.zapi_get_value, 'fail', na_element, ['a', 'c'], required=True)['msg']
+ assert 'No element by given name c.' in error
+
+
+def test_safe_get():
+ na_element = get_zapi_na_element(get_zapi_info())
+ my_obj = na_helper()
+ assert my_obj.safe_get(na_element, ['a', 'b']) == '12345'
+ assert my_obj.safe_get(na_element, ['a', 'c']) is None
+ assert my_obj.safe_get(get_zapi_info(), ['a', 'b']) == '12345'
+ assert my_obj.safe_get(get_zapi_info(), ['a', 'c']) is None
+ assert my_obj.safe_get(get_zapi_info(), ['a', 'none_stuff', 'extra']) is None # TypeError on None
+
+
+def test_safe_get_dict_of_list():
+ my_obj = na_helper()
+ my_dict = {'a': ['b', 'c', {'d': ['e']}]}
+ assert my_obj.safe_get(my_dict, ['a', 0]) == 'b'
+ assert my_obj.safe_get(my_dict, ['a', 2, 'd', 0]) == 'e'
+ assert my_obj.safe_get(my_dict, ['a', 3]) is None
+
+
+def test_safe_get_with_exception():
+ na_element = get_zapi_na_element(get_zapi_info())
+ my_obj = create_ontap_module({'hostname': None})
+ # KeyError
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.safe_get, KeyError, na_element, ['a', 'c'], allow_sparse_dict=False)
+ assert 'No element by given name c.' in error
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.safe_get, KeyError, get_zapi_info(), ['a', 'c'], allow_sparse_dict=False)
+ assert 'c' == error
+ # IndexError
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.safe_get, IndexError, get_zapi_info(), ['a', 'bad_stuff', 4], allow_sparse_dict=False)
+ print('EXC', error)
+ if ut_utilities.is_indexerror_exception_formatted():
+ assert 'list index out of range' in str(error)
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.safe_get, IndexError, get_zapi_info(), ['a', 'bad_stuff', -4], allow_sparse_dict=False)
+ print('EXC', error)
+ if ut_utilities.is_indexerror_exception_formatted():
+ assert 'list index out of range' in str(error)
+ # TypeError - not sure I can build a valid ZAPI NaElement that can give a type error, but using a dict worked.
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.safe_get, TypeError, get_zapi_info(), ['a', 'bad_stuff', 'extra'], allow_sparse_dict=False)
+ # 'list indices must be integers, not str' with 2.7
+ # 'list indices must be integers or slices, not str' with 3.x
+ assert 'list indices must be integers' in error
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.safe_get, TypeError, get_zapi_info(), ['a', 'none_stuff', 'extra'], allow_sparse_dict=False)
+ # 'NoneType' object has no attribute '__getitem__' with 2.7
+ # 'NoneType' object is not subscriptable with 3.x
+ assert "'NoneType' object " in error
+
+
+def test_get_value_for_bool():
+ my_obj = na_helper()
+ for value, from_zapi, expected in [
+ (None, 'any', None),
+ ('true', True, True),
+ ('false', True, False),
+ ('any', True, False), # no error checking if key is not present
+ (True, False, 'true'),
+ (False, False, 'false'),
+ ('any', False, 'true'), # no error checking if key is not present
+ ]:
+ result = my_obj.get_value_for_bool(from_zapi, value)
+ print(value, from_zapi, result)
+ assert result == expected
+
+
+def test_get_value_for_bool_with_exception():
+ na_element = get_zapi_na_element(get_zapi_info())
+ my_obj = create_ontap_module({'hostname': None})
+ # Error with from_zapi=True if key is present
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.get_value_for_bool, TypeError, True, 1234, 'key')
+ assert "expecting 'str' type for 'key': 1234" in error
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.get_value_for_bool, ValueError, True, 'any', 'key')
+ assert "Unexpected value: 'any' received from ZAPI for boolean attribute: 'key'" == error
+ # TypeError - expecting a bool
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.get_value_for_bool, TypeError, False, 'any', 'key')
+ assert "expecting 'bool' type for 'key': 'any'" in error
+
+
+def test_get_value_for_int():
+ my_obj = na_helper()
+ for value, from_zapi, expected in [
+ (None, 'any', None),
+ ('12345', True, 12345),
+ (12345, True, 12345), # no error checking if key is not present
+ (12345, False, '12345'),
+ ]:
+ result = my_obj.get_value_for_int(from_zapi, value)
+ print(value, from_zapi, result)
+ assert result == expected
+
+
+def test_get_value_for_int_with_exception():
+ na_element = get_zapi_na_element(get_zapi_info())
+ my_obj = create_ontap_module({'hostname': None})
+ # Error with from_zapi=True if key is present
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.get_value_for_int, TypeError, True, 1234, 'key')
+ assert "expecting 'str' type for 'key': 1234" in error
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.get_value_for_int, ValueError, True, 'any', 'key')
+ assert "invalid literal for int() with base 10: 'any'" == error
+ # TypeError - expecting a int
+ error = expect_and_capture_ansible_exception(my_obj.na_helper.get_value_for_int, TypeError, False, 'any', 'key')
+ assert "expecting 'int' type for 'key': 'any'" in error
+
+
+def test_get_value_for_list():
+ my_obj = na_helper()
+ zapi_info = {
+ 'a': [{'b': 'a1'}, {'b': 'a2'}, {'b': 'a3'}]
+ }
+ for from_zapi, zapi_parent, zapi_child, data, expected in [
+ (True, None, None, None, []),
+ (True, get_zapi_na_element(zapi_info), None, None, [None]),
+ (True, get_zapi_na_element(get_zapi_info()).get_child_by_name('a'), None, None, ['12345', None, None]),
+ (True, get_zapi_na_element(zapi_info).get_child_by_name('a'), None, None, ['a1', 'a2', 'a3']),
+ (False, 'parent', 'child', [], b'<parent/>'),
+ (False, 'parent', 'child', ['1', '1'], b'<parent><child>1</child><child>1</child></parent>'),
+ ]:
+ result = my_obj.get_value_for_list(from_zapi, zapi_parent, zapi_child, data)
+ print(from_zapi, expected, result)
+ if from_zapi:
+ if zapi_parent:
+ print(zapi_parent.to_string())
+ # ordering may be different with 3.5 compared to 3.9 or 2.7
+ assert set(result) == set(expected)
+ else:
+ print(result.to_string())
+ assert result.to_string() == expected
+
+
+def test_zapi_get_attrs():
+ my_obj = na_helper()
+ zapi_info = {
+ 'a': {'b': 'a1', 'c': 'a2', 'd': 'a3', 'int': '123'}
+ }
+ naelement = get_zapi_na_element(zapi_info)
+ attr_dict = {
+ 'first': {'key_list': ['a', 'b']}
+ }
+ result = {}
+ my_obj.zapi_get_attrs(naelement, attr_dict, result)
+ assert result == {'first': 'a1'}
+
+ # if element not found return None, unless omitnone is True
+ attr_dict = {
+ 'none': {'key_list': ['a', 'z'], 'omitnone': True}
+ }
+ my_obj.zapi_get_attrs(naelement, attr_dict, result)
+ assert result == {'first': 'a1'}
+
+ # if element not found return None when required and omitnone are False
+ attr_dict = {
+ 'none': {'key_list': ['a', 'z']}
+ }
+ my_obj.zapi_get_attrs(naelement, attr_dict, result)
+ assert result == {'first': 'a1', 'none': None}
+
+ # if element not found return default
+ result = {}
+ attr_dict = {
+ 'none': {'key_list': ['a', 'z'], 'default': 'some_default'}
+ }
+ my_obj.zapi_get_attrs(naelement, attr_dict, result)
+ assert result == {'none': 'some_default'}
+
+ # convert to int
+ result = {}
+ attr_dict = {
+ 'int': {'key_list': ['a', 'int'], 'convert_to': int}
+ }
+ my_obj.zapi_get_attrs(naelement, attr_dict, result)
+ assert result == {'int': 123}
+
+ # if element not found return None, unless required is True
+ my_obj = create_ontap_module({'hostname': 'abc'})
+ attr_dict = {
+ 'none': {'key_list': ['a', 'z'], 'required': True}
+ }
+ # the contents of to_string() may be in a different sequence depending on the python version
+ assert expect_and_capture_ansible_exception(my_obj.na_helper.zapi_get_attrs, 'fail', naelement, attr_dict, result)['msg'].startswith((
+ "Error reading ['a', 'z'] from b'<results status=\"passed\"><a>", # python 3.x
+ "Error reading ['a', 'z'] from <results status=\"passed\"><a>" # python 2.7
+ ))
+
+
+def test_set_parameters():
+ my_obj = na_helper()
+ adict = dict((x, x * x) for x in range(10))
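+ # i.e. {0: 0, 1: 1, 2: 4, ..., 9: 81} - ten keys, none of them mapped to None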
+ assert my_obj.set_parameters(adict) == adict
+ assert my_obj.parameters == adict
+ assert len(my_obj.parameters) == 10
+
+ # None values are not copied
+ adict[3] = None
+ assert my_obj.set_parameters(adict) != adict
+ assert my_obj.parameters != adict
+ assert len(my_obj.parameters) == 9
+
+
+def test_get_caller():
+ assert na_helper.get_caller(0) == 'get_caller'
+ assert na_helper.get_caller(1) == 'test_get_caller'
+
+ def one(depth):
+ return na_helper.get_caller(depth)
+ assert one(1) == 'one'
+
+ def two():
+ return one(2)
+ assert two() == 'two'
+
+ def three():
+ return two(), one(3)
+ assert three() == ('two', 'test_get_caller')
+
+
+@patch('traceback.extract_stack')
+def test_get_caller_2_7(mock_frame):
+ frame = ('first', 'second', 'function_name')
+ mock_frame.return_value = [frame]
+ assert na_helper.get_caller(0) == 'function_name'
+
+
+@patch('traceback.extract_stack')
+def test_get_caller_bad_format(mock_frame):
+ frame = ('first', 'second')
+ mock_frame.return_value = [frame]
+ assert na_helper.get_caller(0) == "Error retrieving function name: tuple index out of range - [('first', 'second')]"
+
+
+def test_fail_on_error():
+ my_obj = create_ontap_module({'hostname': 'abc'})
+ assert my_obj.na_helper.fail_on_error(None) is None
+ assert expect_and_capture_ansible_exception(my_obj.na_helper.fail_on_error, 'fail', 'error_msg')['msg'] ==\
+ 'Error in expect_and_capture_ansible_exception: error_msg'
+ assert expect_and_capture_ansible_exception(my_obj.na_helper.fail_on_error, 'fail', 'error_msg', 'api')['msg'] ==\
+ 'Error in expect_and_capture_ansible_exception: calling api: api: error_msg'
+ previous_errors = ['some_error']
+ exc = expect_and_capture_ansible_exception(my_obj.na_helper.fail_on_error, 'fail', 'error_msg', 'api', previous_errors=previous_errors)
+ assert exc['msg'] == 'Error in expect_and_capture_ansible_exception: calling api: api: error_msg'
+ assert exc['previous_errors'] == previous_errors[0]
+ exc = expect_and_capture_ansible_exception(my_obj.na_helper.fail_on_error, 'fail', 'error_msg', 'api', True)
+ assert exc['msg'] == 'Error in expect_and_capture_ansible_exception: calling api: api: error_msg'
+ assert exc['stack']
+ delattr(my_obj.na_helper, 'ansible_module')
+ assert expect_and_capture_ansible_exception(my_obj.na_helper.fail_on_error, AttributeError, 'error_message') ==\
+ "Expecting self.ansible_module to be set when reporting {'msg': 'Error in expect_and_capture_ansible_exception: error_message'}"
+
+
+def test_cmp():
+ assert na_cmp(None, 'any') == -1
+ # string comparison ignores case
+ assert na_cmp('ABC', 'abc') == 0
+ assert na_cmp('abcd', 'abc') == 1
+ assert na_cmp('abd', 'abc') == 1
+ assert na_cmp(['abd', 'abc'], ['abc', 'abd']) == 0
+ # list comparison ignores case
+ assert na_cmp(['ABD', 'abc'], ['abc', 'abd']) == 0
+ # but not duplicates
+ assert na_cmp(['ABD', 'ABD', 'abc'], ['abc', 'abd']) == 1
+
+
+def test_fall_back_to_zapi():
+ my_obj = create_ontap_module({'hostname': 'abc'}, version=2)
+ parameters = {'use_rest': 'never'}
+ assert my_obj.na_helper.fall_back_to_zapi(my_obj.na_helper.ansible_module, 'some message', parameters) is None
+ assert_no_warnings()
+
+ parameters = {'use_rest': 'auto'}
+ assert my_obj.na_helper.fall_back_to_zapi(my_obj.na_helper.ansible_module, 'some message', parameters) is False
+ assert_warning_was_raised('Falling back to ZAPI: some message')
+
+ parameters = {'use_rest': 'always'}
+ clear_warnings()
+ assert 'Error: some message' in expect_and_capture_ansible_exception(
+ my_obj.na_helper.fall_back_to_zapi, 'fail', my_obj.na_helper.ansible_module, 'some message', parameters)['msg']
+ assert_no_warnings()
+
+
+def test_module_deprecated():
+ my_obj = create_ontap_module({'hostname': 'abc'})
+ assert my_obj.na_helper.module_deprecated(my_obj.na_helper.ansible_module) is None
+ assert_warning_was_raised('This module only supports ZAPI and is deprecated. It will no longer work with newer versions of ONTAP. '
+ 'The final ONTAP version to support ZAPI is ONTAP 9.12.1.')
+
+
+def test_module_replaces():
+ my_obj = create_ontap_module({'hostname': 'abc'})
+ new_module = 'na_ontap_new_modules'
+ assert my_obj.na_helper.module_replaces(new_module, my_obj.na_helper.ansible_module) is None
+ assert_warning_was_raised('netapp.ontap.%s should be used instead.' % new_module)
+
+
+def test_compare_chmod_value():
+ myobj = na_helper()
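+ # compare_chmod_value appears to accept the desired mode either as an octal string
+ # ("0777", "755", "4555", ...) or as the same literal string as the current value,
+ # while the current mode is a 12-character symbolic string (setuid/setgid/sticky
+ # flags followed by the rwx triplets), as the assertions below illustrate.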
+ assert myobj.compare_chmod_value("0777", "---rwxrwxrwx") is True
+ assert myobj.compare_chmod_value("777", "---rwxrwxrwx") is True
+ assert myobj.compare_chmod_value("7777", "sstrwxrwxrwx") is True
+ assert myobj.compare_chmod_value("4555", "s--r-xr-xr-x") is True
+ assert myobj.compare_chmod_value(None, "---rwxrwxrwx") is False
+ assert myobj.compare_chmod_value("755", "rwxrwxrwxrwxr") is False
+ assert myobj.compare_chmod_value("777", "---ssxrwxrwx") is False
+ assert myobj.compare_chmod_value("7777", "rwxrwxrwxrwx") is False
+ assert myobj.compare_chmod_value("7777", "7777") is True
+
+
+def test_ignore_missing_vserver_on_delete():
+ my_obj = create_ontap_module({'hostname': 'abc'})
+ assert not my_obj.na_helper.ignore_missing_vserver_on_delete('error')
+ my_obj.na_helper.parameters['state'] = 'absent'
+ error = 'Internal error, vserver name is required, when processing error: error_msg'
+ assert error in expect_and_capture_ansible_exception(my_obj.na_helper.ignore_missing_vserver_on_delete, 'fail', 'error_msg')['msg']
+ my_obj.na_helper.parameters['vserver'] = 'svm'
+ error = 'Internal error, error should contain "message" key, found:'
+ assert error in expect_and_capture_ansible_exception(my_obj.na_helper.ignore_missing_vserver_on_delete, 'fail', {'error_msg': 'error'})['msg']
+ error = 'Internal error, error should be str or dict, found:'
+ assert error in expect_and_capture_ansible_exception(my_obj.na_helper.ignore_missing_vserver_on_delete, 'fail', ['error_msg'])['msg']
+ assert not my_obj.na_helper.ignore_missing_vserver_on_delete('error')
+ assert my_obj.na_helper.ignore_missing_vserver_on_delete({'message': 'SVM "svm" does not exist.'})
+
+
+def test_remove_hal_links():
+ my_obj = create_ontap_module({'hostname': 'abc'})
+ assert my_obj.na_helper.remove_hal_links(None) is None
+ assert my_obj.na_helper.remove_hal_links('string') is None
+ adict = {
+ '_links': 'whatever',
+ 'other': 'other'
+ }
+ # dict
+ test_object = copy.deepcopy(adict)
+ assert my_obj.na_helper.remove_hal_links(test_object) is None
+ assert '_links' not in test_object
+ # list of dicts
+ test_object = [copy.deepcopy(adict)] * 5
+ assert my_obj.na_helper.remove_hal_links(test_object) is None
+ assert all('_links' not in elem for elem in test_object)
+ # dict of dicts
+ test_object = {'a': copy.deepcopy(adict), 'b': copy.deepcopy(adict)}
+ assert my_obj.na_helper.remove_hal_links(test_object) is None
+ assert all('_links' not in value for value in test_object.values())
+ # list of list of dicts
+ items = [copy.deepcopy(adict)] * 5
+ test_object = [items, items]
+ assert my_obj.na_helper.remove_hal_links(test_object) is None
+ assert all('_links' not in elem for elems in test_object for elem in elems)
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_rest.py
new file mode 100644
index 000000000..98cac5390
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_rest.py
@@ -0,0 +1,586 @@
+# Copyright (c) 2018-2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils netapp.py - REST features '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import os.path
+import pytest
+import sys
+import tempfile
+
+from ansible.module_utils import basic
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import call, patch
+
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ assert_no_warnings, assert_warning_was_raised, create_module, expect_and_capture_ansible_exception, patch_ansible, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+VERSION = {'version': {
+ 'full': '9.8.45',
+ 'generation': 9,
+ 'major': 8,
+ 'minor': 45
+}}
+
+SRR = rest_responses({
+ 'vservers_with_admin': (200, {
+ 'records': [
+ {'vserver': 'vserver1', 'type': 'data '},
+ {'vserver': 'vserver2', 'type': 'data '},
+ {'vserver': 'cserver', 'type': 'admin'}
+ ]}, None),
+ 'vservers_without_admin': (200, {
+ 'records': [
+ {'vserver': 'vserver1', 'type': 'data '},
+ {'vserver': 'vserver2', 'type': 'data '},
+ ]}, None),
+ 'vservers_single': (200, {
+ 'records': [
+ {'vserver': 'single', 'type': 'data '},
+ ]}, None),
+ 'vservers_empty': (200, {}, None),
+ 'vservers_error': (200, {
+ 'records': [
+ {'vserver': 'single', 'type': 'data '},
+ ]}, 'some error'),
+ 'nodes': (200, {
+ 'records': [
+ VERSION,
+ {'node': 'node2', 'version': 'version'},
+ ]}, None),
+ 'precluster_error': (400, {}, {'message': 'are available in precluster.'}),
+})
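+ # each entry registered with rest_responses appears to be consumed as a
+ # (status_code, json_body, error) triple - see the SRR[...][0] / [1] / [2]
+ # accesses in the tests below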
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'cert_filepath': None,
+ 'key_filepath': None,
+}
+
+CERT_ARGS = {
+ 'hostname': 'test',
+ 'cert_filepath': 'test_pem.pem',
+ 'key_filepath': 'test_key.key'
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_restapi_object(default_args, module_args=None):
+ module = create_module(MockONTAPModule, default_args, module_args)
+ return netapp_utils.OntapRestAPI(module.module)
+
+
+def test_write_to_file():
+ ''' check error and debug logs can be written to disk '''
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ # logging an error also adds a debug record
+ rest_api.log_error(404, '404 error')
+ print(rest_api.errors)
+ print(rest_api.debug_logs)
+ # logging a debug record only
+ rest_api.log_debug(501, '501 error')
+ print(rest_api.errors)
+ print(rest_api.debug_logs)
+
+ try:
+ tempdir = tempfile.TemporaryDirectory()
+ filepath = os.path.join(tempdir.name, 'log.txt')
+ except AttributeError:
+ # python 2.7 does not support tempfile.TemporaryDirectory
+ # we're taking a small chance that there is a race condition
+ filepath = '/tmp/deleteme354.txt'
+ rest_api.write_debug_log_to_file(filepath=filepath, append=False)
+ with open(filepath, 'r') as log:
+ lines = log.readlines()
+ assert len(lines) == 4
+ assert lines[0].strip() == 'Debug: 404'
+ assert lines[2].strip() == 'Debug: 501'
+
+ # Idempotent, as append is False
+ rest_api.write_debug_log_to_file(filepath=filepath, append=False)
+ with open(filepath, 'r') as log:
+ lines = log.readlines()
+ assert len(lines) == 4
+ assert lines[0].strip() == 'Debug: 404'
+ assert lines[2].strip() == 'Debug: 501'
+
+ # Duplication, as append is True
+ rest_api.write_debug_log_to_file(filepath=filepath, append=True)
+ with open(filepath, 'r') as log:
+ lines = log.readlines()
+ assert len(lines) == 8
+ assert lines[0].strip() == 'Debug: 404'
+ assert lines[2].strip() == 'Debug: 501'
+ assert lines[4].strip() == 'Debug: 404'
+ assert lines[6].strip() == 'Debug: 501'
+
+ rest_api.write_errors_to_file(filepath=filepath, append=False)
+ with open(filepath, 'r') as log:
+ lines = log.readlines()
+ assert len(lines) == 1
+ assert lines[0].strip() == 'Error: 404 error'
+
+ # Idempotent, as append is False
+ rest_api.write_errors_to_file(filepath=filepath, append=False)
+ with open(filepath, 'r') as log:
+ lines = log.readlines()
+ assert len(lines) == 1
+ assert lines[0].strip() == 'Error: 404 error'
+
+ # Duplication, as append is True
+ rest_api.write_errors_to_file(filepath=filepath, append=True)
+ with open(filepath, 'r') as log:
+ lines = log.readlines()
+ assert len(lines) == 2
+ assert lines[0].strip() == 'Error: 404 error'
+ assert lines[1].strip() == 'Error: 404 error'
+
+ # Empty data
+ rest_api.write_to_file(tag='atag', filepath=filepath, append=False)
+ with open(filepath, 'r') as log:
+ lines = log.readlines()
+ assert len(lines) == 1
+ assert lines[0].strip() == 'atag'
+
+ builtins = 'builtins' if sys.version_info > (3, 0) else '__builtin__'
+
+ with patch('%s.open' % builtins) as mock_open:
+ mock_open.side_effect = KeyError('Open error')
+ exc = expect_and_capture_ansible_exception(rest_api.write_to_file, KeyError, tag='atag')
+ assert str(exc) == 'Open error'
+ print(mock_open.mock_calls)
+ assert call('/tmp/ontap_log', 'a') in mock_open.mock_calls
+
+
+def test_is_rest_true():
+ ''' is_rest is expected to return True '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ is_rest = rest_api.is_rest()
+ print(rest_api.errors)
+ print(rest_api.debug_logs)
+ assert is_rest
+
+
+def test_is_rest_false():
+ ''' is_rest is expected to return False '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ is_rest = rest_api.is_rest()
+ print(rest_api.errors)
+ print(rest_api.debug_logs)
+ assert not is_rest
+ assert rest_api.errors[0] == SRR['is_zapi'][2]
+ assert rest_api.debug_logs[0][0] == SRR['is_zapi'][0] # status_code
+ assert rest_api.debug_logs[0][1] == SRR['is_zapi'][2] # error
+
+
+def test_is_rest_false_9_5():
+ ''' is_rest is expected to return False '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_95']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ is_rest = rest_api.is_rest()
+ print(rest_api.errors)
+ print(rest_api.debug_logs)
+ assert not is_rest
+ assert not rest_api.errors
+ assert not rest_api.debug_logs
+
+
+def test_is_rest_true_9_6():
+ ''' is_rest is expected to return True '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ is_rest = rest_api.is_rest()
+ print(rest_api.errors)
+ print(rest_api.debug_logs)
+ assert is_rest
+ assert not rest_api.errors
+ assert not rest_api.debug_logs
+
+
+def test_fail_has_username_password_and_cert():
+ ''' failure case in auth_method '''
+ module_args = dict(cert_filepath='dummy')
+ msg = 'Error: cannot have both basic authentication (username/password) and certificate authentication (cert/key files)'
+ assert expect_and_capture_ansible_exception(create_restapi_object, 'fail', DEFAULT_ARGS, module_args)['msg'] == msg
+
+
+def test_fail_has_username_password_and_key():
+ ''' failure case in auth_method '''
+ module_args = dict(key_filepath='dummy')
+ msg = 'Error: cannot have both basic authentication (username/password) and certificate authentication (cert/key files)'
+ assert expect_and_capture_ansible_exception(create_restapi_object, 'fail', DEFAULT_ARGS, module_args)['msg'] == msg
+
+
+def test_fail_has_username_and_cert():
+ ''' failure case in auth_method '''
+ args = dict(DEFAULT_ARGS)
+ module_args = dict(cert_filepath='dummy')
+ del args['password']
+ msg = 'Error: username and password have to be provided together and cannot be used with cert or key files'
+ assert expect_and_capture_ansible_exception(create_restapi_object, 'fail', args, module_args)['msg'] == msg
+
+
+def test_fail_has_password_and_cert():
+ ''' failure case in auth_method '''
+ args = dict(DEFAULT_ARGS)
+ module_args = dict(cert_filepath='dummy')
+ del args['username']
+ msg = 'Error: username and password have to be provided together and cannot be used with cert or key files'
+ assert expect_and_capture_ansible_exception(create_restapi_object, 'fail', args, module_args)['msg'] == msg
+
+
+def test_has_username_password():
+ ''' auth_method reports expected value '''
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_api.auth_method == 'speedy_basic_auth'
+
+
+def test_has_cert_no_key():
+ ''' auth_method reports expected value '''
+ args = dict(CERT_ARGS)
+ del args['key_filepath']
+ rest_api = create_restapi_object(args)
+ assert rest_api.auth_method == 'single_cert'
+
+
+def test_has_cert_and_key():
+ ''' auth_method reports expected value '''
+ rest_api = create_restapi_object(CERT_ARGS)
+ assert rest_api.auth_method == 'cert_key'
+
+
+def test_get_cserver():
+ ''' using REST to get cserver - not sure if it's needed '''
+ register_responses([
+ ('GET', 'private/cli/vserver', SRR['vservers_with_admin']),
+ ('GET', 'private/cli/vserver', SRR['vservers_without_admin']),
+ ('GET', 'private/cli/vserver', SRR['vservers_single']),
+ ('GET', 'private/cli/vserver', SRR['vservers_empty']),
+ ('GET', 'private/cli/vserver', SRR['vservers_error']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert netapp_utils.get_cserver(rest_api, is_rest=True) == 'cserver'
+ assert netapp_utils.get_cserver(rest_api, is_rest=True) is None
+ assert netapp_utils.get_cserver(rest_api, is_rest=True) == 'single'
+ assert netapp_utils.get_cserver(rest_api, is_rest=True) is None
+ assert netapp_utils.get_cserver(rest_api, is_rest=True) is None
+
+
+def test_ontaprestapi_init():
+ module_args = {'http_port': 123}
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_api.url == 'https://%s/api/' % DEFAULT_ARGS['hostname']
+ rest_api = create_restapi_object(DEFAULT_ARGS, module_args)
+ assert rest_api.url == 'https://%s:%d/api/' % (DEFAULT_ARGS['hostname'], module_args['http_port'])
+
+
+@patch('logging.basicConfig')
+def test_ontaprestapi_logging(mock_config):
+ create_restapi_object(DEFAULT_ARGS)
+ assert not mock_config.mock_calls
+ module_args = {'feature_flags': {'trace_apis': True}}
+ create_restapi_object(DEFAULT_ARGS, module_args)
+ assert len(mock_config.mock_calls) == 1
+
+
+def test_requires_ontap_9_6():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_api.requires_ontap_9_6('module_name') == 'module_name only supports REST, and requires ONTAP 9.6 or later.'
+
+
+def test_requires_ontap_version():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_api.requires_ontap_version('module_name', '9.1.2') == 'module_name only supports REST, and requires ONTAP 9.1.2 or later.'
+
+
+def test_options_require_ontap_version():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ base = 'using %s requires ONTAP 9.1.2 or later and REST must be enabled'
+ msg = base % 'option_name'
+ msg_m = base % "any of ['op1', 'op2', 'op3']"
+ assert rest_api.options_require_ontap_version('option_name', '9.1.2') == '%s.' % msg
+ assert rest_api.options_require_ontap_version('option_name', '9.1.2', use_rest=True) == '%s - using REST.' % msg
+ assert rest_api.options_require_ontap_version('option_name', '9.1.2', use_rest=False) == '%s - using ZAPI.' % msg
+ assert rest_api.options_require_ontap_version(['op1', 'op2', 'op3'], '9.1.2') == '%s.' % msg_m
+ rest_api.set_version(VERSION)
+ assert rest_api.options_require_ontap_version(['option_name'], '9.1.2') == '%s - ONTAP version: %s.' % (msg, VERSION['version']['full'])
+ assert rest_api.options_require_ontap_version(['op1', 'op2', 'op3'], '9.1.2', use_rest=True) ==\
+ '%s - ONTAP version: %s - using REST.' % (msg_m, VERSION['version']['full'])
+
+
+def test_meets_rest_minimum_version():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ rest_api.set_version(VERSION)
+ assert rest_api.meets_rest_minimum_version(True, VERSION['version']['generation'], VERSION['version']['major'])
+ assert rest_api.meets_rest_minimum_version(True, VERSION['version']['generation'], VERSION['version']['major'] - 1)
+ assert not rest_api.meets_rest_minimum_version(True, VERSION['version']['generation'], VERSION['version']['major'] + 1)
+ assert not rest_api.meets_rest_minimum_version(True, VERSION['version']['generation'], VERSION['version']['major'], VERSION['version']['minor'] + 1)
+
+
+def test_fail_if_not_rest_minimum_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['generic_error']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ rest_api.use_rest = 'never'
+ # validate consistency bug in fail_if_not_rest_minimum_version
+ assert expect_and_capture_ansible_exception(rest_api.fail_if_not_rest_minimum_version, 'fail', 'module_name', 9, 6)['msg'] ==\
+ 'Error: REST is required for this module, found: "use_rest: never".'
+ # never
+ rest_api = create_restapi_object(DEFAULT_ARGS, {'use_rest': 'never'})
+ assert expect_and_capture_ansible_exception(rest_api.fail_if_not_rest_minimum_version, 'fail', 'module_name', 9, 6)['msg'] ==\
+ 'Error: REST is required for this module, found: "use_rest: never".'
+ # REST error
+ rest_api = create_restapi_object(DEFAULT_ARGS, {'use_rest': 'auto'})
+ assert expect_and_capture_ansible_exception(rest_api.fail_if_not_rest_minimum_version, 'fail', 'module_name', 9, 6)['msg'] ==\
+ 'Error using REST for version, error: Expected error. Error using REST for version, status_code: 400.'
+ # version mismatch
+ assert expect_and_capture_ansible_exception(rest_api.fail_if_not_rest_minimum_version, 'fail', 'module_name', 9, 7)['msg'] ==\
+ 'Error: module_name only supports REST, and requires ONTAP 9.7.0 or later. Found: 9.6.0.'
+ # version match
+ assert rest_api.fail_if_not_rest_minimum_version('module_name', 9, 6) is None
+
+
+def test_check_required_library():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ msg = 'Failed to import the required Python library (requests)'
+ with patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_REQUESTS', False):
+ assert expect_and_capture_ansible_exception(rest_api.check_required_library, 'fail')['msg'].startswith(msg)
+
+
+def test_build_headers():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ app_version = 'basic.py/%s' % netapp_utils.COLLECTION_VERSION
+ assert rest_api.build_headers() == {'X-Dot-Client-App': app_version}
+ assert rest_api.build_headers(accept='accept') == {'X-Dot-Client-App': app_version, 'accept': 'accept'}
+ assert rest_api.build_headers(vserver_name='vserver_name') == {'X-Dot-Client-App': app_version, 'X-Dot-SVM-Name': 'vserver_name'}
+ assert rest_api.build_headers(vserver_uuid='vserver_uuid') == {'X-Dot-Client-App': app_version, 'X-Dot-SVM-UUID': 'vserver_uuid'}
+ assert len(rest_api.build_headers(accept='accept', vserver_name='name', vserver_uuid='uuid')) == 4
+
+
+def test_get_method():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ assert create_restapi_object(DEFAULT_ARGS).get('cluster') == (SRR['is_rest_96'][1], None)
+
+
+def test_post_method():
+ register_responses([
+ ('POST', 'cluster', SRR['is_rest_96']),
+ ])
+ assert create_restapi_object(DEFAULT_ARGS).post('cluster', None) == (SRR['is_rest_96'][1], None)
+
+
+def test_patch_method():
+ register_responses([
+ ('PATCH', 'cluster', SRR['is_rest_96']),
+ ])
+ assert create_restapi_object(DEFAULT_ARGS).patch('cluster', None) == (SRR['is_rest_96'][1], None)
+
+
+def test_delete_method():
+ register_responses([
+ ('DELETE', 'cluster', SRR['is_rest_96']),
+ ])
+ assert create_restapi_object(DEFAULT_ARGS).delete('cluster', None) == (SRR['is_rest_96'][1], None)
+
+
+def test_options_method():
+ register_responses([
+ ('OPTIONS', 'cluster', SRR['is_rest_96']),
+ ])
+ assert create_restapi_object(DEFAULT_ARGS).options('cluster', None) == (SRR['is_rest_96'][1], None)
+
+
+def test_get_node_version_using_rest():
+ register_responses([
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ])
+ assert create_restapi_object(DEFAULT_ARGS).get_node_version_using_rest() == (200, SRR['nodes'][1]['records'][0], None)
+
+
+def test_get_ontap_version_using_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['precluster_error']),
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_api.get_ontap_version_using_rest() == 200
+ assert rest_api.ontap_version['major'] == VERSION['version']['major']
+ assert rest_api.ontap_version['valid']
+
+
+def test__is_rest():
+ if not sys.version_info > (3, 0):
+ return
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ rest_api.use_rest = 'invalid'
+ msg = "use_rest must be one of: never, always, auto. Got: 'invalid'"
+ assert rest_api._is_rest() == (False, msg)
+ # testing always with used_unsupported_rest_properties
+ rest_api.use_rest = 'always'
+ msg = "REST API currently does not support 'xyz'"
+ assert rest_api._is_rest(used_unsupported_rest_properties=['xyz']) == (True, msg)
+ # testing never
+ rest_api.use_rest = 'never'
+ assert rest_api._is_rest() == (False, None)
+ # we need the version
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ # testing always unconditionally and with partially_supported_rest_properties
+ rest_api.use_rest = 'always'
+ msg = 'Error: Minimum version of ONTAP for xyz is (9, 7). Current version: (9, 6, 0).'
+ assert rest_api._is_rest(partially_supported_rest_properties=[('xyz', (9, 7))], parameters=['xyz']) == (True, msg)
+ # No error when version requirement is matched
+ assert rest_api._is_rest(partially_supported_rest_properties=[('xyz', (9, 6))], parameters=['xyz']) == (True, None)
+ # No error when parameter is not used
+ assert rest_api._is_rest(partially_supported_rest_properties=[('abc', (9, 6))], parameters=['xyz']) == (True, None)
+ # testing auto with used_unsupported_rest_properties
+ rest_api.use_rest = 'auto'
+ assert rest_api._is_rest(used_unsupported_rest_properties=['xyz']) == (False, None)
+ # TODO: check warning
+
+
+def test_is_rest_supported_properties():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ rest_api.use_rest = 'always'
+ assert expect_and_capture_ansible_exception(rest_api.is_rest_supported_properties, 'fail', ['xyz'], ['xyz'])['msg'] ==\
+ "REST API currently does not support 'xyz'"
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ assert rest_api.is_rest_supported_properties(['abc'], ['xyz'])
+ assert rest_api.is_rest_supported_properties(['abc'], ['xyz'], report_error=True) == (True, None)
+
+
+def test_is_rest_partially_supported_properties():
+ if not sys.version_info > (3, 0):
+ return
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ rest_api.use_rest = 'auto'
+ assert not rest_api.is_rest_supported_properties(['xyz'], None, [('xyz', (9, 8, 1))])
+ assert_warning_was_raised('Falling back to ZAPI because of unsupported option(s) or option value(s) "xyz" in REST require (9, 8, 1)')
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ rest_api.use_rest = 'auto'
+ assert rest_api.is_rest_supported_properties(['xyz'], None, [('xyz', (9, 8, 1))])
+
+
+def test_is_rest():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ # testing always with used_unsupported_rest_properties
+ rest_api.use_rest = 'always'
+ msg = "REST API currently does not support 'xyz'"
+ assert rest_api.is_rest(used_unsupported_rest_properties=['xyz']) == (True, msg)
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ assert rest_api.is_rest()
+
+
+def test_set_version():
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ rest_api.set_version(VERSION)
+ print('VERSION', rest_api.ontap_version)
+ assert rest_api.ontap_version['generation'] == VERSION['version']['generation']
+ assert rest_api.ontap_version['valid']
+ rest_api.set_version({})
+ assert not rest_api.ontap_version['valid']
+
+
+def test_force_ontap_version_local():
+ """ test get_ontap_version_from_params in isolation """
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ rest_api.set_version(VERSION)
+ print('VERSION', rest_api.ontap_version)
+ assert rest_api.ontap_version['generation'] == VERSION['version']['generation']
+ # same version
+ rest_api.force_ontap_version = VERSION['version']['full']
+ assert not rest_api.get_ontap_version_from_params()
+ # different versions
+ rest_api.force_ontap_version = '10.8.1'
+ warning = rest_api.get_ontap_version_from_params()
+ assert rest_api.ontap_version['generation'] != VERSION['version']['generation']
+ assert rest_api.ontap_version['generation'] == 10
+ assert 'Forcing ONTAP version to 10.8.1 but current version is 9.8.45' in warning
+ # version could not be read
+ rest_api.set_version({})
+ rest_api.force_ontap_version = '10.8'
+ warning = rest_api.get_ontap_version_from_params()
+ assert rest_api.ontap_version['generation'] != VERSION['version']['generation']
+ assert rest_api.ontap_version['generation'] == 10
+ assert rest_api.ontap_version['minor'] == 0
+ assert 'Forcing ONTAP version to 10.8, unable to read current version:' in warning
+
+
+def test_negative_force_ontap_version_local():
+ """ test get_ontap_version_from_params in isolation """
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ # non numeric
+ rest_api.force_ontap_version = '9.8P4'
+ error = 'Error: unexpected format in force_ontap_version, expecting G.M.m or G.M, as in 9.10.1, got: 9.8P4,'
+ assert error in expect_and_capture_ansible_exception(rest_api.get_ontap_version_from_params, 'fail')['msg']
+ # too short
+ rest_api.force_ontap_version = '9'
+ error = 'Error: unexpected format in force_ontap_version, expecting G.M.m or G.M, as in 9.10.1, got: 9,'
+ assert error in expect_and_capture_ansible_exception(rest_api.get_ontap_version_from_params, 'fail')['msg']
+ # too long
+ rest_api.force_ontap_version = '9.1.2.3'
+ error = 'Error: unexpected format in force_ontap_version, expecting G.M.m or G.M, as in 9.10.1, got: 9.1.2.3,'
+ assert error in expect_and_capture_ansible_exception(rest_api.get_ontap_version_from_params, 'fail')['msg']
+
+
+def test_force_ontap_version_rest_call():
+ """ test get_ontap_version_using_rest with force_ontap_version option """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['generic_error']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ # same version
+ rest_api.force_ontap_version = '9.7'
+ assert rest_api.get_ontap_version_using_rest() == 200
+ assert_no_warnings()
+ # different versions
+ rest_api.force_ontap_version = '10.8.1'
+ assert rest_api.get_ontap_version_using_rest() == 200
+ assert rest_api.ontap_version['generation'] == 10
+ assert_warning_was_raised('Forcing ONTAP version to 10.8.1 but current version is dummy_9_9_0')
+ # version could not be read
+ assert rest_api.get_ontap_version_using_rest() == 200
+ assert_warning_was_raised('Forcing ONTAP version to 10.8.1, unable to read current version: error: Expected error, status_code: 400')
+ assert rest_api.ontap_version['generation'] == 10
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_send_request.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_send_request.py
new file mode 100644
index 000000000..f6ae38f21
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_send_request.py
@@ -0,0 +1,271 @@
+# Copyright (c) 2018 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils netapp.py '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.module_utils import basic
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ create_module, expect_and_capture_ansible_exception
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'cert_filepath': None,
+ 'key_filepath': None,
+}
+
+SINGLE_CERT_ARGS = {
+ 'hostname': 'test',
+ 'username': None,
+ 'password': None,
+ 'cert_filepath': 'cert_file',
+ 'key_filepath': None,
+}
+
+CERT_KEY_ARGS = {
+ 'hostname': 'test',
+ 'username': None,
+ 'password': None,
+ 'cert_filepath': 'cert_file',
+ 'key_filepath': 'key_file',
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_restapi_object(default_args):
+ module = create_module(MockONTAPModule, default_args)
+ return netapp_utils.OntapRestAPI(module.module)
+
+
+class mockResponse:
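+ # minimal stand-in for requests.Response: it only provides the attributes and
+ # methods that OntapRestAPI.send_request is expected to use (status_code, content,
+ # headers, text, json() and raise_for_status())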
+ def __init__(self, json_data, status_code, raise_action=None, headers=None, text=None):
+ self.json_data = json_data
+ self.status_code = status_code
+ self.content = json_data
+ self.raise_action = raise_action
+ self.headers = headers or {}
+ self.text = text
+
+ def raise_for_status(self):
+ if self.status_code >= 400 and self.status_code < 600:
+ raise netapp_utils.requests.exceptions.HTTPError('status_code: %s' % self.status_code, response=self)
+
+ def json(self):
+ if self.raise_action == 'bad_json':
+ raise ValueError(self.raise_action)
+ return self.json_data
+
+
+@patch('requests.request')
+def test_empty_get_sent_bad_json(mock_request):
+ ''' get with no data '''
+ mock_request.return_value = mockResponse(json_data='anything', status_code=200, raise_action='bad_json')
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ message, error = rest_api.get('api', None)
+ assert error
+ assert 'Expecting json, got: anything' in error
+ print('errors:', rest_api.errors)
+ print('debug:', rest_api.debug_logs)
+
+
+@patch('requests.request')
+def test_empty_get_sent_bad_but_empty_json(mock_request):
+ ''' get with no data '''
+ mock_request.return_value = mockResponse(json_data='', status_code=200, raise_action='bad_json')
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ message, error = rest_api.get('api', None)
+ assert not error
+
+
+def test_wait_on_job_bad_url():
+ ''' URL format error '''
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'testme'
+ job = dict(_links=dict(self=dict(href=api)))
+ message, error = rest_api.wait_on_job(job)
+ msg = "URL Incorrect format: list index out of range - Job: {'_links': {'self': {'href': 'testme'}}}"
+ assert msg in error
+
+
+@patch('time.sleep')
+@patch('requests.request')
+def test_wait_on_job_timeout(mock_request, sleep_mock):
+ ''' wait_on_job times out when the job state cannot be read '''
+ mock_request.return_value = mockResponse(json_data='', status_code=200, raise_action='bad_json')
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ job = dict(_links=dict(self=dict(href=api)))
+ message, error = rest_api.wait_on_job(job)
+ msg = 'Timeout error: Process still running'
+ assert msg in error
+
+
+@patch('time.sleep')
+@patch('requests.request')
+def test_wait_on_job_job_error(mock_request, sleep_mock):
+ ''' wait_on_job reports the job error message '''
+ mock_request.return_value = mockResponse(json_data=dict(error='Job error message'), status_code=200)
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ job = dict(_links=dict(self=dict(href=api)))
+ message, error = rest_api.wait_on_job(job)
+ msg = 'Job error message'
+ assert msg in error
+
+
+@patch('time.sleep')
+@patch('requests.request')
+def test_wait_on_job_job_failure(mock_request, dont_sleep):
+ ''' wait_on_job reports the failure message when the job state is failure '''
+ mock_request.return_value = mockResponse(json_data=dict(error='Job error message', state='failure', message='failure message'), status_code=200)
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ job = dict(_links=dict(self=dict(href=api)))
+ message, error = rest_api.wait_on_job(job)
+ msg = 'failure message'
+ assert msg in error
+ assert not message
+
+
+@patch('time.sleep')
+@patch('requests.request')
+def test_wait_on_job_timeout_running(mock_request, sleep_mock):
+ ''' wait_on_job times out while the job is still running '''
+ mock_request.return_value = mockResponse(json_data=dict(error='Job error message', state='running', message='any message'), status_code=200)
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ job = dict(_links=dict(self=dict(href=api)))
+ message, error = rest_api.wait_on_job(job)
+ msg = 'Timeout error: Process still running'
+ assert msg in error
+ assert message == 'any message'
+
+
+@patch('time.sleep')
+@patch('requests.request')
+def test_wait_on_job(mock_request, dont_sleep):
+ ''' wait_on_job returns both message and error for an unexpected state '''
+ mock_request.return_value = mockResponse(json_data=dict(error='Job error message', state='other', message='any message'), status_code=200)
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ job = dict(_links=dict(self=dict(href=api)))
+ message, error = rest_api.wait_on_job(job)
+ msg = 'Job error message'
+ assert msg in error
+ assert message == 'any message'
+
+
+@patch('requests.request')
+def test_get_auth_single_cert(mock_request):
+ ''' single certificate authentication passes cert_filepath to requests '''
+ mock_request.return_value = mockResponse(json_data='', status_code=200)
+ rest_api = create_restapi_object(SINGLE_CERT_ARGS)
+ api = 'api/testme'
+ # rest_api.auth_method = 'single_cert'
+ message, error = rest_api.get(api, None)
+ print(mock_request.mock_calls)
+ assert rest_api.auth_method == 'single_cert'
+ assert "cert='cert_file'" in str(mock_request.mock_calls[0])
+
+
+@patch('requests.request')
+def test_get_auth_cert_key(mock_request):
+ ''' certificate and key authentication passes a (cert, key) tuple to requests '''
+ mock_request.return_value = mockResponse(json_data='', status_code=200)
+ rest_api = create_restapi_object(CERT_KEY_ARGS)
+ api = 'api/testme'
+ # rest_api.auth_method = 'single_cert'
+ message, error = rest_api.get(api, None)
+ print(mock_request.mock_calls)
+ assert rest_api.auth_method == 'cert_key'
+ assert "cert=('cert_file', 'key_file')" in str(mock_request.mock_calls[0])
+
+
+def test_get_auth_method_keyerror():
+ my_cx = create_restapi_object(CERT_KEY_ARGS)
+ my_cx.auth_method = 'invalid_method'
+ args = ('method', 'api', 'params')
+ msg = 'xxxx'
+ assert expect_and_capture_ansible_exception(my_cx.send_request, KeyError, *args) == 'invalid_method'
+
+
+@patch('requests.request')
+def test_http_error_no_json(mock_request):
+ ''' get raises HTTPError '''
+ mock_request.return_value = mockResponse(json_data={}, status_code=400)
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ message, error = rest_api.get(api)
+ assert error == 'status_code: 400'
+
+
+@patch('requests.request')
+def test_http_error_with_json_error_field(mock_request):
+ ''' get raises HTTPError '''
+ mock_request.return_value = mockResponse(json_data=dict(state='other', message='any message', error='error_message'), status_code=400)
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ message, error = rest_api.get(api)
+ assert error == 'error_message'
+
+
+@patch('requests.request')
+def test_http_error_attribute_error(mock_request):
+ ''' get raises HTTPError '''
+ mock_request.return_value = mockResponse(json_data='bad_data', status_code=400)
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ message, error = rest_api.get(api)
+ assert error == 'status_code: 400'
+
+
+@patch('requests.request')
+def test_connection_error(mock_request):
+ ''' get reports ConnectionError '''
+ mock_request.side_effect = netapp_utils.requests.exceptions.ConnectionError('connection_error')
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ message, error = rest_api.get(api)
+ # print(rest_api.errors)
+ assert error == 'connection_error'
+ # assert False
+
+
+@patch('requests.request')
+def test_options_allow_in_header(mock_request):
+ ''' OPTIONS returns Allow key '''
+ mock_request.return_value = mockResponse(json_data={}, headers={'Allow': 'allowed'}, status_code=200)
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ message, error = rest_api.options(api)
+ assert error is None
+ assert message == {'Allow': 'allowed'}
+
+
+@patch('requests.request')
+def test_formdata_in_response(mock_request):
+ ''' GET returns form data as text '''
+ mock_request.return_value = mockResponse(
+ json_data={}, headers={'Content-Type': 'multipart/form-data'}, raise_action='bad_json', status_code=200, text='testme')
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ api = 'api/testme'
+ message, error = rest_api.get(api)
+ assert error is None
+ assert message == {'text': 'testme'}
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_sf.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_sf.py
new file mode 100644
index 000000000..99c74242b
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_sf.py
@@ -0,0 +1,85 @@
+# Copyright (c) 2018-2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils netapp.py - solidfire related methods '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible.module_utils import basic
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ patch_ansible, create_module, expect_and_capture_ansible_exception
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+if not netapp_utils.has_sf_sdk():
+ pytestmark = pytest.mark.skip("skipping as missing required solidfire")
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_ontap_module(default_args=None):
+ return create_module(MockONTAPModule, default_args).module
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_SF_SDK', 'dummy')
+def test_has_sf_sdk():
+ assert netapp_utils.has_sf_sdk() == 'dummy'
+
+
+@patch('solidfire.factory.ElementFactory.create')
+def test_create_sf_connection(mock_sf_create):
+ module = create_ontap_module(DEFAULT_ARGS)
+ mock_sf_create.return_value = 'dummy'
+ assert netapp_utils.create_sf_connection(module) == 'dummy'
+
+
+@patch('solidfire.factory.ElementFactory.create')
+def test_negative_create_sf_connection_exception(mock_sf_create):
+ module = create_ontap_module(DEFAULT_ARGS)
+ mock_sf_create.side_effect = KeyError('dummy')
+ assert str(expect_and_capture_ansible_exception(netapp_utils.create_sf_connection, Exception, module)) == "Unable to create SF connection: 'dummy'"
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_SF_SDK', False)
+def test_negative_create_sf_connection_no_sdk():
+ module = create_ontap_module(DEFAULT_ARGS)
+ assert expect_and_capture_ansible_exception(netapp_utils.create_sf_connection, 'fail', module)['msg'] == 'the python SolidFire SDK module is required'
+
+
+def test_negative_create_sf_connection_no_options():
+ module = create_ontap_module(DEFAULT_ARGS)
+ peer_options = {}
+ assert expect_and_capture_ansible_exception(netapp_utils.create_sf_connection, 'fail', module, host_options=peer_options)['msg'] ==\
+ 'hostname, username, password are required for ElementSW connection.'
+
+
+def test_negative_create_sf_connection_missing_and_extra_options():
+ module = create_ontap_module(DEFAULT_ARGS)
+ peer_options = {'hostname': 'host', 'username': 'user'}
+ assert expect_and_capture_ansible_exception(netapp_utils.create_sf_connection, 'fail', module, host_options=peer_options)['msg'] ==\
+ 'password is required for ElementSW connection.'
+ peer_options = {'hostname': 'host', 'username': 'user', 'cert_filepath': 'cert'}
+ assert expect_and_capture_ansible_exception(netapp_utils.create_sf_connection, 'fail', module, host_options=peer_options)['msg'] ==\
+ 'password is required for ElementSW connection. cert_filepath is not supported for ElementSW connection.'
+
+
+def test_negative_create_sf_connection_extra_options():
+ module = create_ontap_module(DEFAULT_ARGS)
+ peer_options = {'hostname': 'host', 'username': 'user'}
+ assert expect_and_capture_ansible_exception(netapp_utils.create_sf_connection, 'fail', module, host_options=peer_options)['msg'] ==\
+ 'password is required for ElementSW connection.'
+ peer_options = {'hostname': 'host', 'username': 'user', 'password': 'pass', 'cert_filepath': 'cert', 'key_filepath': 'key'}
+ assert expect_and_capture_ansible_exception(netapp_utils.create_sf_connection, 'fail', module, host_options=peer_options)['msg'] ==\
+ 'cert_filepath, key_filepath are not supported for ElementSW connection.'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_zapi.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_zapi.py
new file mode 100644
index 000000000..b3a09f5cb
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_netapp_zapi.py
@@ -0,0 +1,374 @@
+# Copyright (c) 2018-2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils netapp.py - ZAPI related features '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.module_utils import basic
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ patch_ansible, create_module, expect_and_capture_ansible_exception, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_raw_xml_response, build_zapi_error, zapi_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip("skipping as missing required netapp_lib")
+
+if not hasattr(netapp_utils.ssl, 'create_default_context') or not hasattr(netapp_utils.ssl, 'SSLContext'):
+ pytestmark = pytest.mark.skip("skipping as missing required ssl package with SSLContext support")
+
+ZRR = zapi_responses({
+ 'error_no_vserver': build_zapi_error(12345, 'Vserver API missing vserver parameter.'),
+ 'error_connection_error': build_zapi_error(12345, 'URLError'),
+ 'error_other_error': build_zapi_error(12345, 'Some other error message.'),
+})
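+# register_responses() below pairs a ZAPI call name (e.g. 'vserver-get-iter') with one of these
+# canned responses; entries such as 'cserver', 'empty' and 'error' used in the tests are
+# presumably the defaults provided by zapi_responses().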
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'cert_filepath': None,
+ 'key_filepath': None,
+}
+
+CERT_ARGS = {
+ 'hostname': 'test',
+ 'cert_filepath': 'test_pem.pem',
+ 'key_filepath': 'test_key.key'
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_ontap_module(default_args, module_args=None):
+ return create_module(MockONTAPModule, default_args, module_args).module
+
+
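+# create_ontapzapicx_object builds an OntapZAPICx connection directly, bypassing setup_na_ontap_zapi,
+# so that certificate and key handling can be exercised in isolation.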
+def create_ontapzapicx_object(default_args, module_args=None):
+ ontap_mock = create_module(MockONTAPModule, default_args, module_args)
+ my_args = {'module': ontap_mock.module}
+ for key in 'hostname', 'username', 'password', 'cert_filepath', 'key_filepath':
+ if key in ontap_mock.module.params:
+ my_args[key] = ontap_mock.module.params[key]
+ return netapp_utils.OntapZAPICx(**my_args)
+
+
+def test_get_cserver():
+ ''' validate cluster vserver name is correctly retrieved '''
+ register_responses([
+ ('vserver-get-iter', ZRR['cserver']),
+ ])
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS))
+ cserver = netapp_utils.get_cserver(server)
+ assert cserver == 'cserver'
+
+
+def test_get_cserver_none():
+ ''' validate cluster vserver name is correctly retrieved '''
+ register_responses([
+ ('vserver-get-iter', ZRR['empty']),
+ ])
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS))
+ cserver = netapp_utils.get_cserver(server)
+ assert cserver is None
+
+
+def test_negative_get_cserver():
+ ''' validate NaApiError is correctly reported '''
+ register_responses([
+ ('vserver-get-iter', ZRR['error']),
+ ])
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS))
+ assert expect_and_capture_ansible_exception(netapp_utils.get_cserver, netapp_utils.zapi.NaApiError, server)
+
+
+def test_negative_get_cserver_connection_error():
+ ''' validate NaApiError is correctly ignored for connection or authorization issues '''
+ register_responses([
+ ('vserver-get-iter', ZRR['error_connection_error']),
+ ])
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS))
+ cserver = netapp_utils.get_cserver(server)
+ assert cserver is None
+
+
+def test_setup_na_ontap_zapi_logging():
+ module_args = {'feature_flags': {'trace_apis': False}}
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS, module_args))
+ assert not server._trace
+ module_args = {'feature_flags': {'trace_apis': True}}
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS, module_args))
+ assert server._trace
+
+
+def test_setup_na_ontap_zapi_auth_method_and_https():
+ module_args = {'feature_flags': {'trace_apis': False}}
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS, module_args))
+ assert server._auth_style == server.STYLE_LOGIN_PASSWORD
+ assert server.get_port() == '80'
+ module_args = {'feature_flags': {'trace_apis': True}}
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(CERT_ARGS, module_args))
+ assert server._auth_style == server.STYLE_CERTIFICATE
+ assert server.get_port() == '443'
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_negative_setup_na_ontap_zapi():
+ error = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert expect_and_capture_ansible_exception(netapp_utils.setup_na_ontap_zapi, 'fail', create_ontap_module(DEFAULT_ARGS))['msg'] == error
+
+
+def test_set_zapi_port_and_transport():
+ server = netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS))
+ netapp_utils.set_zapi_port_and_transport(server, True, None, False)
+ assert server.get_port() == '443'
+ assert server.get_transport_type() == 'https'
+ netapp_utils.set_zapi_port_and_transport(server, False, None, False)
+ assert server.get_port() == '80'
+ assert server.get_transport_type() == 'http'
+
+
+@patch('ssl.SSLContext.load_cert_chain')
+def test_certificate_method_zapi(mock_ssl):
+ ''' should succeed and return an HTTPS handler when load_cert_chain is mocked '''
+ zapi_cx = create_ontapzapicx_object(CERT_ARGS)
+ assert isinstance(zapi_cx._create_certificate_auth_handler(), netapp_utils.zapi.urllib.request.HTTPSHandler)
+ assert zapi_cx._get_url() == 'http://test:80/servlets/netapp.servlets.admin.XMLrequest_filer'
+
+
+def test_certificate_method_zapi_missing_files():
+ ''' should fail when trying to read the certificate file '''
+ zapi_cx = create_ontapzapicx_object(CERT_ARGS)
+ msg1 = 'Cannot load SSL certificate, check files exist.'
+ # for python 2.6 :(
+ msg2 = 'SSL certificate authentication requires python 2.7 or later.'
+ assert expect_and_capture_ansible_exception(zapi_cx._create_certificate_auth_handler, 'fail')['msg'].startswith((msg1, msg2))
+ assert zapi_cx._get_url() == 'http://test:80/servlets/netapp.servlets.admin.XMLrequest_filer'
+
+
+@patch('ssl.create_default_context')
+def test_negative_certificate_method_zapi(mock_ssl):
+ ''' should fail when trying to set context '''
+ mock_ssl.side_effect = AttributeError('for test purpose')
+ zapi_cx = create_ontapzapicx_object(CERT_ARGS)
+ # AttributeError('for test purpose') with 3.x but AttributeError('for test purpose',) with 2.7
+ error = "SSL certificate authentication requires python 2.7 or later. More info: AttributeError('for test purpose'"
+ assert expect_and_capture_ansible_exception(zapi_cx._create_certificate_auth_handler, 'fail')['msg'].startswith(error)
+
+
+def test_classify_zapi_exception_cluster_only():
+ ''' verify output matches expectations '''
+ code = 13005
+ message = 'Unable to find API: diagnosis-alert-get-iter on data vserver trident_svm'
+ zapi_exception = netapp_utils.zapi.NaApiError(code, message)
+ kind, new_message = netapp_utils.classify_zapi_exception(zapi_exception)
+ assert kind == 'missing_vserver_api_error'
+ assert new_message.endswith("%d:%s" % (code, message))
+
+
+def test_classify_zapi_exception_rpc_error():
+ ''' verify output matches expectations '''
+ code = 13001
+ message = "RPC: Couldn't make connection [from mgwd on node \"laurentn-vsim1\" (VSID: -1) to mgwd at 172.32.78.223]"
+ error_message = 'NetApp API failed. Reason - %d:%s' % (code, message)
+ zapi_exception = netapp_utils.zapi.NaApiError(code, message)
+ kind, new_message = netapp_utils.classify_zapi_exception(zapi_exception)
+ assert kind == 'rpc_error'
+ assert new_message == error_message
+
+
+def test_classify_zapi_exception_other_error():
+ ''' verify output matches expectations '''
+ code = 13008
+ message = 'whatever'
+ error_message = 'NetApp API failed. Reason - %d:%s' % (code, message)
+ zapi_exception = netapp_utils.zapi.NaApiError(code, message)
+ kind, new_message = netapp_utils.classify_zapi_exception(zapi_exception)
+ assert kind == 'other_error'
+ assert new_message == error_message
+
+
+def test_classify_zapi_exception_attributeerror():
+ ''' verify output matches expectations '''
+ zapi_exception = 'invalid'
+ kind, new_message = netapp_utils.classify_zapi_exception(zapi_exception)
+ assert kind == 'other_error'
+ assert new_message == zapi_exception
+
+
+def test_zapi_parse_response_sanitized():
+ ''' should not fail when trying to read invalid XML characters (\x08) '''
+ zapi_cx = create_ontapzapicx_object(DEFAULT_ARGS)
+ response = b"<?xml version='1.0' encoding='UTF-8' ?>\n<!DOCTYPE netapp SYSTEM 'file:/etc/netapp_gx.dtd'>\n"
+ response += b"<netapp version='1.180' xmlns='http://www.netapp.com/filer/admin'>\n<results status=\"passed\">"
+ response += b"<cli-output> (cluster log-forwarding create)\n\n"
+ response += b"Testing network connectivity to the destination host 10.10.10.10. \x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\n\n"
+ response += b"Error: command failed: Cannot contact destination host (10.10.10.10) from node\n"
+ response += b" &quot;laurentn-vsim1&quot;. Verify connectivity to desired host or skip the\n"
+ response += b" connectivity check with the &quot;-force&quot; parameter.</cli-output>"
+ response += b"<cli-result-value>0</cli-result-value></results></netapp>\n"
+ # Manually extract cli-output contents
+ cli_output = response.split(b'<cli-output>')[1]
+ cli_output = cli_output.split(b'</cli-output>')[0]
+ cli_output = cli_output.replace(b'&quot;', b'"')
+ # the XML parser would choke on \x08, zapi_cx._parse_response replaces them with '.'
+ cli_output = cli_output.replace(b'\x08', b'.')
+ # Use xml parser to extract cli-output contents
+ xml = zapi_cx._parse_response(response)
+ results = xml.get_child_by_name('results')
+ new_cli_output = results.get_child_content('cli-output')
+ assert cli_output.decode() == new_cli_output
+
+
+def test_zapi_parse_response_unsanitized():
+ ''' should fail when trying to read invalid XML characters (\x08) '''
+ # use feature_flags to disable sanitization
+ module_args = {'feature_flags': {'sanitize_xml': False}}
+ zapi_cx = create_ontapzapicx_object(DEFAULT_ARGS, module_args)
+ response = b"<?xml version='1.0' encoding='UTF-8' ?>\n<!DOCTYPE netapp SYSTEM 'file:/etc/netapp_gx.dtd'>\n"
+ response += b"<netapp version='1.180' xmlns='http://www.netapp.com/filer/admin'>\n<results status=\"passed\">"
+ response += b"<cli-output> (cluster log-forwarding create)\n\n"
+ response += b"Testing network connectivity to the destination host 10.10.10.10. \x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\x08\n\n"
+ response += b"Error: command failed: Cannot contact destination host (10.10.10.10) from node\n"
+ response += b" &quot;laurentn-vsim1&quot;. Verify connectivity to desired host or skip the\n"
+ response += b" connectivity check with the &quot;-force&quot; parameter.</cli-output>"
+ response += b"<cli-result-value>0</cli-result-value></results></netapp>\n"
+ with pytest.raises(netapp_utils.zapi.etree.XMLSyntaxError) as exc:
+ zapi_cx._parse_response(response)
+ msg = 'PCDATA invalid Char value 8'
+ assert exc.value.msg.startswith(msg)
+
+
+def test_zapi_cx_add_auth_header():
+ ''' should add header '''
+ module = create_ontap_module(DEFAULT_ARGS)
+ zapi_cx = netapp_utils.setup_na_ontap_zapi(module)
+ assert isinstance(zapi_cx, netapp_utils.OntapZAPICx)
+ assert zapi_cx.base64_creds is not None
+ request, dummy = zapi_cx._create_request(netapp_utils.zapi.NaElement('dummy_tag'))
+ assert "Authorization" in [x[0] for x in request.header_items()]
+
+
+def test_zapi_cx_add_auth_header_explicit():
+ ''' should add header '''
+ module_args = {'feature_flags': {'classic_basic_authorization': False}}
+ module = create_ontap_module(DEFAULT_ARGS, module_args)
+ zapi_cx = netapp_utils.setup_na_ontap_zapi(module)
+ assert isinstance(zapi_cx, netapp_utils.OntapZAPICx)
+ assert zapi_cx.base64_creds is not None
+ request, dummy = zapi_cx._create_request(netapp_utils.zapi.NaElement('dummy_tag'))
+ assert "Authorization" in [x[0] for x in request.header_items()]
+
+
+def test_zapi_cx_no_auth_header():
+ ''' should not add authorization header '''
+ module_args = {'feature_flags': {'classic_basic_authorization': True, 'always_wrap_zapi': False}}
+ module = create_ontap_module(DEFAULT_ARGS, module_args)
+ zapi_cx = netapp_utils.setup_na_ontap_zapi(module)
+ assert not isinstance(zapi_cx, netapp_utils.OntapZAPICx)
+ request, dummy = zapi_cx._create_request(netapp_utils.zapi.NaElement('dummy_tag'))
+ assert "Authorization" not in [x[0] for x in request.header_items()]
+
+
+def test_is_zapi_connection_error():
+ message = 'URLError'
+ assert netapp_utils.is_zapi_connection_error(message)
+ if sys.version_info >= (3, 5, 0):
+ # not defined in python 2.7
+ message = (ConnectionError(), '')
+ assert netapp_utils.is_zapi_connection_error(message)
+ message = []
+ assert not netapp_utils.is_zapi_connection_error(message)
+
+
+def test_is_zapi_write_access_error():
+ message = 'Insufficient privileges: XXXXXXX does not have write access'
+ assert netapp_utils.is_zapi_write_access_error(message)
+ message = 'URLError'
+ assert not netapp_utils.is_zapi_write_access_error(message)
+ message = []
+ assert not netapp_utils.is_zapi_write_access_error(message)
+
+
+def test_is_zapi_missing_vserver_error():
+ message = 'Vserver API missing vserver parameter.'
+ assert netapp_utils.is_zapi_missing_vserver_error(message)
+ message = 'URLError'
+ assert not netapp_utils.is_zapi_missing_vserver_error(message)
+ message = []
+ assert not netapp_utils.is_zapi_missing_vserver_error(message)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.IMPORT_EXCEPTION', 'test_exc')
+def test_netapp_lib_is_required():
+ msg = 'Error: the python NetApp-Lib module is required. Import error: %s' % 'test_exc'
+ assert netapp_utils.netapp_lib_is_required() == msg
+
+
+def test_warn_when_rest_is_not_supported_http():
+ assert netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS, {'use_rest': 'always'}))
+ print_warnings()
+ assert_warning_was_raised("Using ZAPI for basic.py, ignoring 'use_rest: always'. Note: https is set to false.")
+
+
+def test_warn_when_rest_is_not_supported_https():
+ assert netapp_utils.setup_na_ontap_zapi(module=create_ontap_module(DEFAULT_ARGS, {'use_rest': 'always', 'https': True}))
+ print_warnings()
+ assert_warning_was_raised("Using ZAPI for basic.py, ignoring 'use_rest: always'.")
+
+
+def test_sanitize_xml():
+ zapi_cx = create_ontapzapicx_object(CERT_ARGS)
+ xml = build_raw_xml_response({'test_key': 'test_Value'})
+ print('XML', xml)
+ assert zapi_cx.sanitize_xml(xml) == xml
+
+ # these checks require that 'V' does not appear elsewhere in the XML, and python 3.x because of bytes handling
+ if sys.version_info > (3, 0):
+ test_xml = zapi_cx.sanitize_xml(xml.replace(b'V', bytes([8])))
+ sanitized_xml = xml.replace(b'V', b'.')
+ assert zapi_cx.sanitize_xml(test_xml) == sanitized_xml
+
+ with patch('builtins.bytes') as mock_bytes:
+ # forcing bytes to return some unexpected value to force the older paths
+ mock_bytes.return_value = 0
+ assert zapi_cx.sanitize_xml(test_xml) == sanitized_xml
+ with patch('builtins.chr') as mock_chr:
+ # forcing python 2.7 behavior
+ mock_chr.return_value = b'\x08'
+ assert zapi_cx.sanitize_xml(test_xml) == sanitized_xml
+
+
+def test_parse_response_exceptions_single():
+ zapi_cx = create_ontapzapicx_object(CERT_ARGS)
+ exc = expect_and_capture_ansible_exception(zapi_cx._parse_response, netapp_utils.zapi.etree.XMLSyntaxError, b'response')
+ print(exc.value)
+ assert str(exc.value).startswith('Start tag expected')
+
+
+@patch('netapp_lib.api.zapi.zapi.NaServer._parse_response')
+def test_parse_response_exceptions_double(mock_parse_response):
+ xml_exc = netapp_utils.zapi.etree.XMLSyntaxError('UT', 'code', 101, 22, 'filename')
+ mock_parse_response.side_effect = [xml_exc, KeyError('second exception')]
+ zapi_cx = create_ontapzapicx_object(CERT_ARGS)
+ exc = expect_and_capture_ansible_exception(zapi_cx._parse_response, netapp_utils.zapi.etree.XMLSyntaxError, 'response')
+ print(exc)
+ assert str(exc.value) == 'UT. Received: response (filename, line 101)'
+
+ # force an exception while processing exception
+ delattr(xml_exc, 'msg')
+ mock_parse_response.side_effect = [xml_exc, KeyError('second exception')]
+ zapi_cx = create_ontapzapicx_object(CERT_ARGS)
+ exc = expect_and_capture_ansible_exception(zapi_cx._parse_response, netapp_utils.zapi.etree.XMLSyntaxError, 'response')
+ print(exc)
+ assert str(exc.value) == 'None (filename, line 101)'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_response_helper.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_response_helper.py
new file mode 100644
index 000000000..21bb3c187
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_response_helper.py
@@ -0,0 +1,156 @@
+# Copyright (c) 2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils rest_response_helpers.py - REST response helpers '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+
+from ansible_collections.netapp.ontap.plugins.module_utils import rest_response_helpers
+
+RECORD = {'key': 'value'}
+
+RESPONSES = {
+ 'empty': {},
+ 'zero_record': {'num_records': 0},
+ 'empty_records': {'records': []},
+ 'one_record': {'records': [RECORD], 'num_records': 1},
+ 'one_record_no_num_records': {'records': [RECORD]},
+ 'one_record_no_num_records_no_records': RECORD,
+ 'two_records': {'records': [RECORD, RECORD], 'num_records': 2},
+}
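+# The tests below follow a (response_in, error_in, response_out, error_out) convention: the first
+# two values are passed to the helper under test, the last two are the expected return tuple.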
+
+
+def test_check_for_0_or_1_records():
+ # no records --> None
+ response_in, error_in, response_out, error_out = RESPONSES['zero_record'], None, None, None
+ assert rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in) == (response_out, error_out)
+ response_in, error_in, response_out, error_out = RESPONSES['empty_records'], None, None, None
+ assert rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in) == (response_out, error_out)
+
+ # one record
+ response_in, error_in, response_out, error_out = RESPONSES['one_record'], None, RECORD, None
+ assert rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in) == (response_out, error_out)
+ response_in, error_in, response_out, error_out = RESPONSES['one_record_no_num_records'], None, RECORD, None
+ assert rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in) == (response_out, error_out)
+ response_in, error_in, response_out, error_out = RESPONSES['one_record_no_num_records_no_records'], None, RECORD, None
+ assert rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in) == (response_out, error_out)
+
+
+def test_check_for_0_or_1_records_errors():
+ # bad input
+ response_in, error_in, response_out, error_out = None, None, None, 'calling: cluster: no response None.'
+ assert rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in) == (response_out, error_out)
+ response_in, error_in, response_out, error_out = RESPONSES['empty'], None, None, 'calling: cluster: no response {}.'
+ assert rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in) == (response_out, error_out)
+
+ # error in
+ response_in, error_in, response_out, error_out = None, 'some_error', None, 'calling: cluster: got some_error.'
+ assert rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in) == (response_out, error_out)
+
+ # more than 1 record
+ response_in, error_in, response_out, error_out = RESPONSES['two_records'], None, RESPONSES['two_records'], 'calling: cluster: unexpected response'
+ response, error = rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in)
+ assert response == response_out
+ assert error.startswith(error_out)
+ assert 'for query' not in error
+ response, error = rest_response_helpers.check_for_0_or_1_records('cluster', response_in, error_in, query=RECORD)
+ assert response == response_out
+ assert error.startswith(error_out)
+ expected = 'for query: %s' % RECORD
+ assert expected in error
+
+
+def test_check_for_0_or_more_records():
+ # no records --> None
+ response_in, error_in, response_out, error_out = RESPONSES['zero_record'], None, None, None
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+ response_in, error_in, response_out, error_out = RESPONSES['empty_records'], None, None, None
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+
+ # one record
+ response_in, error_in, response_out, error_out = RESPONSES['one_record'], None, [RECORD], None
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+ response_in, error_in, response_out, error_out = RESPONSES['one_record_no_num_records'], None, [RECORD], None
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+
+ # more than 1 record
+ response_in, error_in, response_out, error_out = RESPONSES['two_records'], None, [RECORD, RECORD], None
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+
+
+def test_check_for_0_or_more_records_errors():
+ # bad input
+ response_in, error_in, response_out, error_out = None, None, None, 'calling: cluster: no response None.'
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+ response_in, error_in, response_out, error_out = RESPONSES['empty'], None, None, 'calling: cluster: no response {}.'
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+ error = "calling: cluster: got No \"records\" key in {'key': 'value'}."
+ response_in, error_in, response_out, error_out = RESPONSES['one_record_no_num_records_no_records'], None, None, error
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+
+ # error in
+ response_in, error_in, response_out, error_out = None, 'some_error', None, 'calling: cluster: got some_error.'
+ assert rest_response_helpers.check_for_0_or_more_records('cluster', response_in, error_in) == (response_out, error_out)
+
+
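+# MockOntapRestAPI stands in for the REST API object in the check_for_error_and_job_results tests:
+# wait_on_job() either returns the canned (job_response, error) pair or raises if the test does not
+# expect it to be called.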
+class MockOntapRestAPI:
+ def __init__(self, job_response=None, error=None, raise_if_called=False):
+ self.job_response, self.error, self.raise_if_called = job_response, error, raise_if_called
+
+ def wait_on_job(self, job):
+ if self.raise_if_called:
+ raise AttributeError('wait_on_job should not be called in this test!')
+ return self.job_response, self.error
+
+
+def test_check_for_error_and_job_results_no_job():
+ rest_api = MockOntapRestAPI(raise_if_called=True)
+ response_in, error_in, response_out, error_out = None, None, None, None
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api) == (response_out, error_out)
+
+ response_in, error_in, response_out, error_out = 'any', None, 'any', None
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api) == (response_out, error_out)
+
+ response = {'no_job': 'entry'}
+ response_in, error_in, response_out, error_out = response, None, response, None
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api) == (response_out, error_out)
+
+
+def test_check_for_error_and_job_results_with_job():
+ rest_api = MockOntapRestAPI(job_response='job_response', error=None)
+ response = {'job': 'job_entry'}
+ expected_response = {'job': 'job_entry', 'job_response': 'job_response'}
+ response_in, error_in, response_out, error_out = response, None, expected_response, None
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api) == (response_out, error_out)
+
+ response = {'jobs': ['job_entry'], 'num_records': 1}
+ expected_response = {'jobs': ['job_entry'], 'num_records': 1, 'job_response': 'job_response'}
+ response_in, error_in, response_out, error_out = response, None, expected_response, None
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api) == (response_out, error_out)
+
+
+def test_negative_check_for_error_and_job_results_error_in():
+ rest_api = MockOntapRestAPI(raise_if_called=True)
+ response_in, error_in, response_out, error_out = None, 'forced_error', None, 'calling: cluster: got forced_error.'
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api) == (response_out, error_out)
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api, raw_error=False) == (response_out, error_out)
+ error_out = 'forced_error'
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api, raw_error=True) == (response_out, error_out)
+
+
+def test_negative_check_for_error_and_job_results_job_error():
+ rest_api = MockOntapRestAPI(job_response='job_response', error='job_error')
+ response = {'job': 'job_entry'}
+ response_in, error_in, response_out, error_out = response, None, response, "job reported error: job_error, received {'job': 'job_entry'}."
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api) == (response_out, error_out)
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api, raw_error=False) == (response_out, error_out)
+ error_out = 'job_error'
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api, raw_error=True) == (response_out, error_out)
+
+
+def test_negative_check_for_error_and_job_results_multiple_jobs_error():
+ rest_api = MockOntapRestAPI(raise_if_called=True)
+ response = {'jobs': 'job_entry', 'num_records': 3}
+ response_in, error_in, response_out, error_out = response, None, response, "multiple jobs in progress, can't check status"
+ assert rest_response_helpers.check_for_error_and_job_results('cluster', response_in, error_in, rest_api) == (response_out, error_out)
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_application.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_application.py
new file mode 100644
index 000000000..346114ebb
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_application.py
@@ -0,0 +1,346 @@
+# This code is part of Ansible, but is an independent component.
+# This particular file snippet, and this file snippet only, is BSD licensed.
+# Modules you write using this snippet, which is embedded dynamically by Ansible
+# still belong to the author of the module, and may assign their own license
+# to the complete work.
+#
+# Copyright (c) 2022, Laurent Nicolas <laurentn@netapp.com>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+""" unit tests for module_utils rest_vserver.py
+
+ Provides wrappers for svm/svms REST APIs
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.module_utils import basic
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import create_module, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.module_utils import rest_application
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'app_uuid': (200, {"records": [{"uuid": "test_uuid"}], "num_records": 1}, None),
+ 'app_details': (200, {"details": "test_details"}, None),
+ 'app_components': (200, {"records": [{"component": "test_component", "uuid": "component_uuid"}], "num_records": 1}, None),
+ 'app_component_details': (200, {"component": "test_component", "uuid": "component_uuid", 'backing_storage': 'backing_storage'}, None),
+ 'unexpected_argument': (200, None, 'Unexpected argument: exclude_aggregates'),
+})
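+# 'zero_records', 'success' and 'generic_error' used below are presumably the default entries
+# provided by rest_responses(); the entries above add application-specific canned responses.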
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'cert_filepath': None,
+ 'key_filepath': None,
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_restapi_object(default_args, module_args=None):
+ module = create_module(MockONTAPModule, default_args, module_args)
+ return netapp_utils.OntapRestAPI(module.module)
+
+
+def create_app(svm_name='vserver_name', app_name='application_name'):
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ return rest_application.RestApplication(rest_api, svm_name, app_name)
+
+
+def test_successfully_create_object():
+ register_responses([
+ # ('GET', 'svm/svms', SRR['svm_uuid']),
+ # ('GET', 'svm/svms', SRR['zero_records']),
+ ])
+ assert create_app().svm_name == 'vserver_name'
+
+
+def test_successfully_get_application_uuid():
+ register_responses([
+ ('GET', 'application/applications', SRR['zero_records']),
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_uuid() == (None, None)
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ # UUID is cached if not None, so no API call
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+
+
+def test_negative_get_application_uuid():
+ register_responses([
+ ('GET', 'application/applications', SRR['generic_error']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_uuid() == (None, rest_error_message('', 'application/applications'))
+
+
+def test_successfully_get_application_details():
+ register_responses([
+ ('GET', 'application/applications', SRR['zero_records']),
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid', SRR['app_details']),
+ ('GET', 'application/applications/test_uuid', SRR['app_details']),
+ ('GET', 'application/applications/test_uuid', SRR['app_details']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_details() == (None, None)
+ assert my_app.get_application_details() == (SRR['app_details'][1], None)
+ # UUID is cached if not None, so no API call
+ assert my_app.get_application_details(template='test') == (SRR['app_details'][1], None)
+ assert my_app.get_application_details() == (SRR['app_details'][1], None)
+
+
+def test_negative_get_application_details():
+ register_responses([
+ ('GET', 'application/applications', SRR['generic_error']),
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid', SRR['generic_error']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_details() == (None, rest_error_message('', 'application/applications'))
+ assert my_app.get_application_details() == (None, rest_error_message('', 'application/applications/test_uuid'))
+
+
+def test_successfully_create_application():
+ register_responses([
+ ('POST', 'application/applications', SRR['success']),
+ ])
+ my_app = create_app()
+ assert my_app.create_application({'option': 'option'}) == ({}, None)
+
+
+def test_negative_create_application():
+ register_responses([
+ ('POST', 'application/applications', SRR['generic_error']),
+ ('POST', 'application/applications', SRR['unexpected_argument']),
+ # third call, create fails if app already exists
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ])
+ my_app = create_app()
+ assert my_app.create_application({'option': 'option'}) == (None, rest_error_message('', 'application/applications'))
+ assert my_app.create_application({'option': 'option'}) == (
+ None, 'calling: application/applications: got Unexpected argument: exclude_aggregates. "exclude_aggregates" requires ONTAP 9.9.1 GA or later.')
+
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.create_application({'option': 'option'}) ==\
+ (None, 'function create_application should not be called when application uuid is set: test_uuid.')
+
+
+def test_successfully_patch_application():
+ register_responses([
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('PATCH', 'application/applications/test_uuid', SRR['success']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.patch_application({'option': 'option'}) == ({}, None)
+
+
+def test_negative_patch_application():
+ register_responses([
+ # first call, patch fails if app does not exist
+ # second call
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('PATCH', 'application/applications/test_uuid', SRR['generic_error']),
+ ])
+ my_app = create_app()
+ assert my_app.patch_application({'option': 'option'}) == (None, 'function should not be called before application uuid is set.')
+
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.patch_application({'option': 'option'}) == (None, rest_error_message('', 'application/applications/test_uuid'))
+
+
+def test_successfully_delete_application():
+ register_responses([
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('DELETE', 'application/applications/test_uuid', SRR['success']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.delete_application() == ({}, None)
+
+
+def test_negative_delete_application():
+ register_responses([
+ # first call, delete fails if app does not exist
+ # second call
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('DELETE', 'application/applications/test_uuid', SRR['generic_error']),
+ ])
+ my_app = create_app()
+ assert my_app.delete_application() == (None, 'function should not be called before application uuid is set.')
+
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.delete_application() == (None, rest_error_message('', 'application/applications/test_uuid'))
+
+
+def test_successfully_get_application_components():
+ register_responses([
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid/components', SRR['zero_records']),
+ ('GET', 'application/applications/test_uuid/components', SRR['app_components']),
+ ('GET', 'application/applications/test_uuid/components', SRR['app_components']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_components() == (None, None)
+ assert my_app.get_application_components() == (SRR['app_components'][1]['records'], None)
+ assert my_app.get_application_components() == (SRR['app_components'][1]['records'], None)
+
+
+def test_negative_get_application_components():
+ register_responses([
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid/components', SRR['generic_error']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_components() == (None, 'function should not be called before application uuid is set.')
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_components() == (None, rest_error_message('', 'application/applications/test_uuid/components'))
+
+
+def test_successfully_get_application_component_uuid():
+ register_responses([
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid/components', SRR['zero_records']),
+ ('GET', 'application/applications/test_uuid/components', SRR['app_components']),
+ ('GET', 'application/applications/test_uuid/components', SRR['app_components']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_component_uuid() == (None, None)
+ assert my_app.get_application_component_uuid() == ('component_uuid', None)
+ assert my_app.get_application_component_uuid() == ('component_uuid', None)
+
+
+def test_negative_get_application_component_uuid():
+ register_responses([
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid/components', SRR['generic_error']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_component_uuid() == (None, 'function should not be called before application uuid is set.')
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_component_uuid() == (None, rest_error_message('', 'application/applications/test_uuid/components'))
+
+
+def test_successfully_get_application_component_details():
+ register_responses([
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid/components', SRR['app_components']),
+ ('GET', 'application/applications/test_uuid/components/component_uuid', SRR['app_components']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_component_details() == (SRR['app_components'][1]['records'][0], None)
+
+
+def test_negative_get_application_component_details():
+ register_responses([
+ # first call, fail as UUID not set
+ # second call, fail to retrieve UUID
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid/components', SRR['zero_records']),
+ # fail to retrieve UUID
+ ('GET', 'application/applications/test_uuid/components', SRR['generic_error']),
+ # fail to retrieve component_details
+ ('GET', 'application/applications/test_uuid/components', SRR['app_components']),
+ ('GET', 'application/applications/test_uuid/components/component_uuid', SRR['generic_error']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_component_details() == (None, 'function should not be called before application uuid is set.')
+ # second call, set UUID first
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_component_details() == (None, 'no component for application application_name')
+ # third call
+ assert my_app.get_application_component_details() == (None, rest_error_message('', 'application/applications/test_uuid/components'))
+ # fourth call
+ assert my_app.get_application_component_details() == (None, rest_error_message('', 'application/applications/test_uuid/components/component_uuid'))
+
+
+def test_successfully_get_application_component_backing_storage():
+ register_responses([
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid/components', SRR['app_components']),
+ ('GET', 'application/applications/test_uuid/components/component_uuid', SRR['app_component_details']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_component_backing_storage() == ('backing_storage', None)
+
+
+def test_negative_get_application_component_backing_storage():
+ register_responses([
+ # first call, fail as UUID not set
+ # second call, fail to retrieve UUID
+ ('GET', 'application/applications', SRR['app_uuid']),
+ ('GET', 'application/applications/test_uuid/components', SRR['zero_records']),
+ # fail to retrieve UUID
+ ('GET', 'application/applications/test_uuid/components', SRR['generic_error']),
+ # fail to retrieve component_backing_storage
+ ('GET', 'application/applications/test_uuid/components', SRR['app_components']),
+ ('GET', 'application/applications/test_uuid/components/component_uuid', SRR['generic_error']),
+ ])
+ my_app = create_app()
+ assert my_app.get_application_component_backing_storage() == (None, 'function should not be called before application uuid is set.')
+ # second call, set UUID first
+ assert my_app.get_application_uuid() == ('test_uuid', None)
+ assert my_app.get_application_component_backing_storage() == (None, 'no component for application application_name')
+ # third call
+ assert my_app.get_application_component_backing_storage() == (None, rest_error_message('', 'application/applications/test_uuid/components'))
+ # fourth call
+ assert my_app.get_application_component_backing_storage() == (None, rest_error_message('', 'application/applications/test_uuid/components/component_uuid'))
+
+
+def test_create_application_body():
+ my_app = create_app()
+ body = {
+ 'name': my_app.app_name,
+ 'svm': {'name': my_app.svm_name},
+ 'smart_container': True,
+ 'tname': 'tbody'
+ }
+ assert my_app.create_application_body('tname', 'tbody') == (body, None)
+ body['smart_container'] = False
+ assert my_app.create_application_body('tname', 'tbody', False) == (body, None)
+ assert my_app.create_application_body('tname', 'tbody', 'False') == (None, 'expecting bool value for smart_container, got: False')
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_generic.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_generic.py
new file mode 100644
index 000000000..b2b42ed97
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_generic.py
@@ -0,0 +1,492 @@
+# Copyright (c) 2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils rest_generic.py - REST features '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.module_utils import basic
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, create_module
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.plugins.module_utils import rest_generic
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'vservers_with_admin': (200, {
+ 'records': [
+ {'vserver': 'vserver1', 'type': 'data '},
+ {'vserver': 'vserver2', 'type': 'data '},
+ {'vserver': 'cserver', 'type': 'admin'}
+ ]}, None),
+ 'vservers_single': (200, {
+ 'records': [
+ {'vserver': 'single', 'type': 'data '},
+ ]}, None),
+ 'accepted_response': (202, {
+ 'job': {
+ 'uuid': 'd0b3eefe-cd59-11eb-a170-005056b338cd',
+ '_links': {'self': {'href': '/api/cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd'}}
+ }}, None),
+ 'job_in_progress': (200, {
+ 'job': {
+ 'uuid': 'a1b2c3_job',
+ '_links': {'self': {'href': 'api/some_link'}}
+ }}, None),
+ 'job_success': (200, {
+ 'state': 'success',
+ 'message': 'success_message',
+ 'job': {
+ 'uuid': 'a1b2c3_job',
+ '_links': {'self': {'href': 'some_link'}}
+ }}, None),
+ 'job_failed': (200, {
+ 'state': 'error',
+ 'message': 'error_message',
+ 'job': {
+ 'uuid': 'a1b2c3_job',
+ '_links': {'self': {'href': 'some_link'}}
+ }}, None),
+})
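+# 'is_rest', 'is_rest_9_10_1', 'zero_records' and 'generic_error' used below are presumably the
+# default entries provided by rest_responses().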
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'cert_filepath': None,
+ 'key_filepath': None,
+}
+
+CERT_ARGS = {
+ 'hostname': 'test',
+ 'cert_filepath': 'test_pem.pem',
+ 'key_filepath': 'test_key.key'
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_restapi_object(default_args, module_args=None):
+ module = create_module(MockONTAPModule, default_args, module_args)
+ return netapp_utils.OntapRestAPI(module.module)
+
+
+def test_build_query_with_fields():
+ assert rest_generic.build_query_with_fields(None, None) is None
+ assert rest_generic.build_query_with_fields(query=None, fields=None) is None
+ assert rest_generic.build_query_with_fields(query={'aaa': 'vvv'}, fields=None) == {'aaa': 'vvv'}
+ assert rest_generic.build_query_with_fields(query=None, fields='aaa,bbb') == {'fields': 'aaa,bbb'}
+ assert rest_generic.build_query_with_fields(query={'aaa': 'vvv'}, fields='aaa,bbb') == {'aaa': 'vvv', 'fields': 'aaa,bbb'}
+
+
+def test_build_query_with_timeout():
+ assert rest_generic.build_query_with_timeout(query=None, timeout=30) == {'return_timeout': 30}
+
+ # when timeout is 0, return_timeout is not added
+ assert rest_generic.build_query_with_timeout(query=None, timeout=0) is None
+ assert rest_generic.build_query_with_timeout(query={'aaa': 'vvv'}, timeout=0) == {'aaa': 'vvv'}
+
+ # when return_timeout is in the query, it has precedence
+ query = {'return_timeout': 55}
+ assert rest_generic.build_query_with_timeout(query, timeout=0) == query
+ assert rest_generic.build_query_with_timeout(query, timeout=20) == query
+ query = {'aaa': 'vvv', 'return_timeout': 55}
+ assert rest_generic.build_query_with_timeout(query, timeout=0) == query
+ assert rest_generic.build_query_with_timeout(query, timeout=20) == query
+
+
+def test_successful_get_one_record_no_records_field():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster')
+ assert error is None
+ assert record == SRR['is_rest_9_10_1'][1]
+
+
+def test_successful_get_one_record():
+ register_responses([
+ ('GET', 'cluster', SRR['vservers_single'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster')
+ assert error is None
+ assert record == SRR['vservers_single'][1]['records'][0]
+
+
+def test_successful_get_one_record_no_record():
+ register_responses([
+ ('GET', 'cluster', SRR['zero_records'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster')
+ assert error is None
+ assert record is None
+
+
+def test_successful_get_one_record_NN():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster', query=None, fields=None)
+ assert error is None
+ assert record == SRR['is_rest_9_10_1'][1]
+
+
+def test_successful_get_one_record_NV():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster', query=None, fields='aaa,bbb')
+ assert error is None
+ assert record == SRR['is_rest_9_10_1'][1]
+
+
+def test_successful_get_one_record_VN():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster', query={'aaa': 'value'}, fields=None)
+ assert error is None
+ assert record == SRR['is_rest_9_10_1'][1]
+
+
+def test_successful_get_one_record_VV():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster', query={'aaa': 'value'}, fields='aaa,bbb')
+ assert error is None
+ assert record == SRR['is_rest_9_10_1'][1]
+
+
+def test_error_get_one_record_empty():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster', query=None, fields=None)
+ assert error == 'calling: cluster: no response {}.'
+ assert record is None
+
+
+def test_error_get_one_record_multiple():
+ register_responses([
+ ('GET', 'cluster', SRR['vservers_with_admin'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster', query={'aaa': 'vvv'}, fields=None)
+ assert "calling: cluster: unexpected response {'records':" in error
+ assert "for query: {'aaa': 'vvv'}" in error
+ assert record == SRR['vservers_with_admin'][1]
+
+
+def test_error_get_one_record_rest_error():
+ register_responses([
+ ('GET', 'cluster', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record, error = rest_generic.get_one_record(rest_api, 'cluster', query=None, fields=None)
+ assert error == 'calling: cluster: got Expected error.'
+ assert record is None
+
+
+def test_successful_get_0_or_more_records():
+ register_responses([
+ ('GET', 'cluster', SRR['vservers_with_admin'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster')
+ assert error is None
+ assert records == SRR['vservers_with_admin'][1]['records']
+
+
+def test_successful_get_0_or_more_records_NN():
+ register_responses([
+ ('GET', 'cluster', SRR['vservers_with_admin'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster', query=None, fields=None)
+ assert error is None
+ assert records == SRR['vservers_with_admin'][1]['records']
+
+
+def test_successful_get_0_or_more_records_NV():
+ register_responses([
+ ('GET', 'cluster', SRR['vservers_with_admin'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster', query=None, fields='aaa,bbb')
+ assert error is None
+ assert records == SRR['vservers_with_admin'][1]['records']
+
+
+def test_successful_get_0_or_more_records_VN():
+ register_responses([
+ ('GET', 'cluster', SRR['vservers_with_admin'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster', query={'aaa': 'value'}, fields=None)
+ assert error is None
+ assert records == SRR['vservers_with_admin'][1]['records']
+
+
+def test_successful_get_0_or_more_records_VV():
+ register_responses([
+ ('GET', 'cluster', SRR['vservers_with_admin'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster', query={'aaa': 'value'}, fields='aaa,bbb')
+ assert error is None
+ assert records == SRR['vservers_with_admin'][1]['records']
+
+
+def test_successful_get_0_or_more_records_VV_1_record():
+ register_responses([
+ ('GET', 'cluster', SRR['vservers_single'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster', query={'aaa': 'value'}, fields='aaa,bbb')
+ assert error is None
+ assert records == SRR['vservers_single'][1]['records']
+
+
+def test_successful_get_0_or_more_records_VV_0_record():
+ register_responses([
+ ('GET', 'cluster', SRR['zero_records'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster', query={'aaa': 'value'}, fields='aaa,bbb')
+ assert error is None
+ assert records is None
+
+
+def test_error_get_0_or_more_records():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster', query=None, fields=None)
+ assert error == 'calling: cluster: no response {}.'
+ assert records is None
+
+
+def test_error_get_0_or_more_records_rest_error():
+ register_responses([
+ ('GET', 'cluster', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ records, error = rest_generic.get_0_or_more_records(rest_api, 'cluster', query=None, fields=None)
+ assert error == 'calling: cluster: got Expected error.'
+ assert records is None
+
+
+def test_successful_post_async():
+ register_responses([
+ ('POST', 'cluster', SRR['vservers_single'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.post_async(rest_api, 'cluster', {})
+ assert error is None
+ assert response == SRR['vservers_single'][1]
+
+
+def test_error_post_async():
+ register_responses([
+ ('POST', 'cluster', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.post_async(rest_api, 'cluster', {})
+ assert error == 'calling: cluster: got Expected error.'
+ assert response is None
+
+
+@patch('time.sleep')
+def test_successful_post_async_with_job(dont_sleep):
+ register_responses([
+ ('POST', 'cluster', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_success'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.post_async(rest_api, 'cluster', {})
+ assert error is None
+ assert 'job_response' in response
+ assert response['job_response'] == 'success_message'
+
+
+@patch('time.sleep')
+def test_successful_post_async_with_job_failure(dont_sleep):
+ register_responses([
+ ('POST', 'cluster', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_failed'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.post_async(rest_api, 'cluster', {})
+ assert error is None
+ assert 'job_response' in response
+ assert response['job_response'] == 'error_message'
+
+
+@patch('time.sleep')
+def test_error_post_async_with_job(dont_sleep):
+ register_responses([
+ ('POST', 'cluster', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.post_async(rest_api, 'cluster', {})
+ assert 'job reported error: Expected error - Expected error - Expected error - Expected error, received' in error
+ assert response == SRR['accepted_response'][1]
+
+
+def test_successful_patch_async():
+ register_responses([
+ ('PATCH', 'cluster/uuid', SRR['vservers_single'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.patch_async(rest_api, 'cluster', 'uuid', {})
+ assert error is None
+ assert response == SRR['vservers_single'][1]
+
+
+def test_error_patch_async():
+ register_responses([
+ ('PATCH', 'cluster/uuid', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.patch_async(rest_api, 'cluster', 'uuid', {})
+ assert error == 'calling: cluster/uuid: got Expected error.'
+ assert response is None
+
+
+@patch('time.sleep')
+def test_successful_patch_async_with_job(dont_sleep):
+ register_responses([
+ ('PATCH', 'cluster/uuid', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_success'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.patch_async(rest_api, 'cluster', 'uuid', {})
+ assert error is None
+ assert 'job_response' in response
+ assert response['job_response'] == 'success_message'
+
+
+@patch('time.sleep')
+def test_successful_patch_async_with_job_failure(dont_sleep):
+ register_responses([
+ ('PATCH', 'cluster/uuid', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_failed'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.patch_async(rest_api, 'cluster', 'uuid', {})
+ assert error is None
+ assert 'job_response' in response
+ assert response['job_response'] == 'error_message'
+
+
+@patch('time.sleep')
+def test_error_patch_async_with_job(dont_sleep):
+ register_responses([
+ ('PATCH', 'cluster/uuid', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.patch_async(rest_api, 'cluster', 'uuid', {})
+ assert 'job reported error: Expected error - Expected error - Expected error - Expected error, received' in error
+ assert response == SRR['accepted_response'][1]
+
+
+def test_successful_delete_async():
+ register_responses([
+ ('DELETE', 'cluster/uuid', SRR['vservers_single'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.delete_async(rest_api, 'cluster', 'uuid')
+ assert error is None
+ assert response == SRR['vservers_single'][1]
+
+
+def test_error_delete_async():
+ register_responses([
+ ('DELETE', 'cluster/uuid', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.delete_async(rest_api, 'cluster', 'uuid')
+ assert error == 'calling: cluster/uuid: got Expected error.'
+ assert response is None
+
+
+@patch('time.sleep')
+def test_successful_delete_async_with_job(dont_sleep):
+ register_responses([
+ ('DELETE', 'cluster/uuid', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_success'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.delete_async(rest_api, 'cluster', 'uuid')
+ assert error is None
+ assert 'job_response' in response
+ assert response['job_response'] == 'success_message'
+
+
+@patch('time.sleep')
+def test_successful_delete_async_with_job_failure(dont_sleep):
+ register_responses([
+ ('DELETE', 'cluster/uuid', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_failed'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.delete_async(rest_api, 'cluster', 'uuid')
+ assert error is None
+ assert 'job_response' in response
+ assert response['job_response'] == 'error_message'
+
+
+@patch('time.sleep')
+def test_error_delete_async_with_job(dont_sleep):
+ register_responses([
+ ('DELETE', 'cluster/uuid', SRR['accepted_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_in_progress']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ response, error = rest_generic.delete_async(rest_api, 'cluster', 'uuid')
+ assert 'job reported error: Expected error - Expected error - Expected error - Expected error, received' in error
+ assert response == SRR['accepted_response'][1]
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_owning_resource.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_owning_resource.py
new file mode 100644
index 000000000..a7465e8d2
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_owning_resource.py
@@ -0,0 +1,98 @@
+# Copyright (c) 2022 NetApp
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for module_utils rest_owning_resource.py - REST features '''
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.module_utils import basic
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.plugins.module_utils import rest_owning_resource
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'get_uuid_policy_id_export_policy': (
+ 200,
+ {
+ "records": [{
+ "svm": {
+ "uuid": "uuid",
+ "name": "svm"},
+ "id": 123,
+ "name": "ansible"
+ }],
+ "num_records": 1}, None),
+ 'get_uuid_from_volume': (
+ 200,
+ {
+ "records": [{
+ "svm": {
+ "uuid": "uuid",
+ "name": "svm"},
+ "uuid": "028baa66-41bd-11e9-81d5-00a0986138f7"
+ }]
+ }, None
+ )
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_restapi_object(default_args, module_args=None):
+ module = create_module(MockONTAPModule, default_args, module_args)
+ return netapp_utils.OntapRestAPI(module.module)
+
+
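+# The helpers above build a real AnsibleModule from the shared ONTAP argument spec and
+# wrap it in OntapRestAPI, so the tests below exercise rest_owning_resource end to end
+# while register_responses() supplies the canned REST answers.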
+def test_get_policy_id():
+ register_responses([
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record = rest_owning_resource.get_export_policy_id(rest_api, 'ansible', 'svm', rest_api.module)
+ assert record == SRR['get_uuid_policy_id_export_policy'][1]['records'][0]['id']
+
+
+def test_error_get_policy_id():
+ register_responses([
+ ('GET', 'protocols/nfs/export-policies', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ error = 'Could not find export policy ansible on SVM svm'
+ assert error in expect_and_capture_ansible_exception(rest_owning_resource.get_export_policy_id, 'fail', rest_api, 'ansible', 'svm', rest_api.module)['msg']
+
+
+def test_get_volume_uuid():
+ register_responses([
+ ('GET', 'storage/volumes', SRR['get_uuid_from_volume'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ record = rest_owning_resource.get_volume_uuid(rest_api, 'ansible', 'svm', rest_api.module)
+ assert record == SRR['get_uuid_from_volume'][1]['records'][0]['uuid']
+
+
+def test_error_get_volume_uuid():
+ register_responses([
+ ('GET', 'storage/volumes', SRR['generic_error'])
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ error = 'Could not find volume ansible on SVM svm'
+ assert error in expect_and_capture_ansible_exception(rest_owning_resource.get_volume_uuid, 'fail', rest_api, 'ansible', 'svm', rest_api.module)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_volume.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_volume.py
new file mode 100644
index 000000000..0c1a77e7f
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_volume.py
@@ -0,0 +1,233 @@
+# This code is part of Ansible, but is an independent component.
+# This particular file snippet, and this file snippet only, is BSD licensed.
+# Modules you write using this snippet, which is embedded dynamically by Ansible
+# still belong to the author of the module, and may assign their own license
+# to the complete work.
+#
+# Copyright (c) 2021, Laurent Nicolas <laurentn@netapp.com>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+""" unit tests for module_utils netapp_module.py
+
+ Provides wrappers for storage/volumes REST APIs
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import copy
+import pytest
+import sys
+
+# from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, call
+from ansible_collections.netapp.ontap.plugins.module_utils import rest_volume
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+# REST API canned responses when mocking send_request
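+# Each canned response is a (status_code, json_body, error_message) tuple, mirroring what
+# send_request returns; 'end_of_sequence' is a guard that fails the test if the code under
+# test issues more requests than expected.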
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'one_volume_record': (200, dict(records=[
+ dict(uuid='a1b2c3',
+ name='test',
+ svm=dict(name='vserver'),
+ )
+ ], num_records=1), None),
+ 'three_volume_records': (200, dict(records=[
+ dict(uuid='a1b2c3_1',
+ name='test1',
+ svm=dict(name='vserver'),
+ ),
+ dict(uuid='a1b2c3_2',
+ name='test2',
+ svm=dict(name='vserver'),
+ ),
+ dict(uuid='a1b2c3_3',
+ name='test3',
+ svm=dict(name='vserver'),
+ )
+ ], num_records=3), None),
+ 'job': (200, {
+ 'job': {
+ 'uuid': 'a1b2c3_job',
+ '_links': {'self': {'href': 'api/some_link'}}
+ }}, None),
+ 'job_bad_url': (200, {
+ 'job': {
+ 'uuid': 'a1b2c3_job',
+ '_links': {'self': {'href': 'some_link'}}
+ }}, None),
+ 'job_status_success': (200, {
+ 'state': 'success',
+ 'message': 'success_message',
+ 'job': {
+ 'uuid': 'a1b2c3_job',
+ '_links': {'self': {'href': 'some_link'}}
+ }}, None),
+}
+
+
+class AnsibleFailJson(Exception):
+ """Exception class to be raised by module.fail_json and caught by the test case"""
+
+
+class MockModule(object):
+ ''' rough mock for an Ansible module class '''
+ def __init__(self):
+ self.params = dict(
+ username='my_username',
+ password='my_password',
+ hostname='my_hostname',
+ use_rest='my_use_rest',
+ cert_filepath=None,
+ key_filepath=None,
+ validate_certs='my_validate_certs',
+ http_port=None,
+ feature_flags=None,
+ )
+
+ def fail_json(self, *args, **kwargs): # pylint: disable=unused-argument
+ """function to simulate fail_json: package return data into an exception"""
+ kwargs['failed'] = True
+ raise AnsibleFailJson(kwargs)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_get_volumes_none(mock_request):
+ module = MockModule()
+ rest_api = netapp_utils.OntapRestAPI(module)
+ mock_request.side_effect = [
+ SRR['zero_record'],
+ SRR['end_of_sequence']]
+ volumes, error = rest_volume.get_volumes(rest_api)
+ assert error is None
+ assert volumes is None
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_get_volumes_one(mock_request):
+ module = MockModule()
+ rest_api = netapp_utils.OntapRestAPI(module)
+ mock_request.side_effect = [
+ SRR['one_volume_record'],
+ SRR['end_of_sequence']]
+ volumes, error = rest_volume.get_volumes(rest_api, 'vserver', 'name')
+ assert error is None
+ assert volumes == [SRR['one_volume_record'][1]['records'][0]]
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_get_volumes_three(mock_request):
+ module = MockModule()
+ rest_api = netapp_utils.OntapRestAPI(module)
+ mock_request.side_effect = [
+ SRR['three_volume_records'],
+ SRR['end_of_sequence']]
+ volumes, error = rest_volume.get_volumes(rest_api)
+ assert error is None
+ assert volumes == [SRR['three_volume_records'][1]['records'][x] for x in (0, 1, 2)]
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_get_volume_not_found(mock_request):
+ module = MockModule()
+ rest_api = netapp_utils.OntapRestAPI(module)
+ mock_request.side_effect = [
+ SRR['zero_record'],
+ SRR['end_of_sequence']]
+ volume, error = rest_volume.get_volume(rest_api, 'name', 'vserver')
+ assert error is None
+ assert volume is None
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_get_volume_found(mock_request):
+ module = MockModule()
+ rest_api = netapp_utils.OntapRestAPI(module)
+ mock_request.side_effect = [
+ SRR['one_volume_record'],
+ SRR['end_of_sequence']]
+ volume, error = rest_volume.get_volume(rest_api, 'name', 'vserver')
+ assert error is None
+ assert volume == SRR['one_volume_record'][1]['records'][0]
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_get_volume_too_many(mock_request):
+ module = MockModule()
+ rest_api = netapp_utils.OntapRestAPI(module)
+ mock_request.side_effect = [
+ SRR['three_volume_records'],
+ SRR['end_of_sequence']]
+ dummy, error = rest_volume.get_volume(rest_api, 'name', 'vserver')
+ expected = "calling: storage/volumes: unexpected response"
+ assert expected in error
+
+
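+# The expected call assertions below suggest that patch_volume() defaults the REST
+# 'return_timeout' to 30 seconds and lets a caller-provided query override it (20 in the
+# second test).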
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_patch_volume_async(mock_request):
+ module = MockModule()
+ rest_api = netapp_utils.OntapRestAPI(module)
+ mock_request.side_effect = [
+ copy.deepcopy(SRR['job']), # deepcopy as job is modified in place!
+ SRR['job_status_success'],
+ SRR['end_of_sequence']]
+ body = dict(a1=1, a2=True, a3='str')
+ response, error = rest_volume.patch_volume(rest_api, 'uuid', body)
+ job = dict(SRR['job'][1]) # deepcopy as job is modified in place!
+ job['job_response'] = SRR['job_status_success'][1]['message']
+ assert error is None
+ assert response == job
+ expected = call('PATCH', 'storage/volumes/uuid', {'return_timeout': 30}, json=body, headers=None, files=None)
+ assert expected in mock_request.mock_calls
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_patch_volume_async_with_query(mock_request):
+ module = MockModule()
+ rest_api = netapp_utils.OntapRestAPI(module)
+ mock_request.side_effect = [
+ copy.deepcopy(SRR['job']), # deepcopy as job is modified in place!
+ SRR['job_status_success'],
+ SRR['end_of_sequence']]
+ body = dict(a1=1, a2=True, a3='str')
+ query = dict(return_timeout=20)
+ response, error = rest_volume.patch_volume(rest_api, 'uuid', body, query)
+ job = dict(SRR['job'][1]) # deepcopy as job is modified in place!
+ job['job_response'] = SRR['job_status_success'][1]['message']
+ assert error is None
+ assert response == job
+ expected = call('PATCH', 'storage/volumes/uuid', {'return_timeout': 20}, json=body, headers=None, files=None)
+ assert expected in mock_request.mock_calls
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_vserver.py b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_vserver.py
new file mode 100644
index 000000000..c646abed2
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/module_utils/test_rest_vserver.py
@@ -0,0 +1,120 @@
+# This code is part of Ansible, but is an independent component.
+# This particular file snippet, and this file snippet only, is BSD licensed.
+# Modules you write using this snippet, which is embedded dynamically by Ansible
+# still belong to the author of the module, and may assign their own license
+# to the complete work.
+#
+# Copyright (c) 2022, Laurent Nicolas <laurentn@netapp.com>
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without modification,
+# are permitted provided that the following conditions are met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above copyright notice,
+# this list of conditions and the following disclaimer in the documentation
+# and/or other materials provided with the distribution.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
+# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
+# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
+# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
+# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+""" unit tests for module_utils rest_vserver.py
+
+ Provides wrappers for svm/svms REST APIs
+"""
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible.module_utils import basic
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.module_utils import rest_vserver
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'svm_uuid': (200, {"records": [{"uuid": "test_uuid"}], "num_records": 1}, None),
+})
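+# 'zero_records' and 'generic_error' used below are not defined here; they presumably come
+# from the common entries added by the rest_responses() factory.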
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'cert_filepath': None,
+ 'key_filepath': None,
+}
+
+
+class MockONTAPModule:
+ def __init__(self):
+ self.module = basic.AnsibleModule(netapp_utils.na_ontap_host_argument_spec())
+
+
+def create_restapi_object(default_args, module_args=None):
+ module = create_module(MockONTAPModule, default_args, module_args)
+ return netapp_utils.OntapRestAPI(module.module)
+
+
+def test_successfully_get_vserver():
+ register_responses([
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_vserver.get_vserver(rest_api, 'svm_name') == ({'uuid': 'test_uuid'}, None)
+ assert rest_vserver.get_vserver(rest_api, 'svm_name') == (None, None)
+
+
+def test_negative_get_vserver():
+ register_responses([
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_vserver.get_vserver(rest_api, 'svm_name') == (None, rest_error_message('', 'svm/svms'))
+
+
+def test_successfully_get_vserver_uuid():
+ register_responses([
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_vserver.get_vserver_uuid(rest_api, 'svm_name') == ('test_uuid', None)
+ assert rest_vserver.get_vserver_uuid(rest_api, 'svm_name') == (None, None)
+
+
+def test_negative_get_vserver_uuid():
+ register_responses([
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ])
+ rest_api = create_restapi_object(DEFAULT_ARGS)
+ assert rest_vserver.get_vserver_uuid(rest_api, 'svm_name') == (None, rest_error_message('', 'svm/svms'))
+ assert expect_and_capture_ansible_exception(rest_vserver.get_vserver_uuid, 'fail', rest_api, 'svm_name', rest_api.module)['msg'] ==\
+ rest_error_message('Error fetching vserver svm_name', 'svm/svms')
+ assert rest_vserver.get_vserver_uuid(rest_api, 'svm_name', error_on_none=True) == (None, 'vserver svm_name does not exist or is not a data vserver.')
+ assert expect_and_capture_ansible_exception(rest_vserver.get_vserver_uuid, 'fail', rest_api, 'svm_name', rest_api.module, error_on_none=True)['msg'] ==\
+ 'Error vserver svm_name does not exist or is not a data vserver.'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/.gitignore b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/.gitignore
new file mode 100644
index 000000000..bc1a1f616
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/.gitignore
@@ -0,0 +1,2 @@
+# Created by pytest automatically.
+*
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/CACHEDIR.TAG b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/CACHEDIR.TAG
new file mode 100644
index 000000000..fce15ad7e
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/CACHEDIR.TAG
@@ -0,0 +1,4 @@
+Signature: 8a477f597d28d172789f06886806bc55
+# This file is a cache directory tag created by pytest.
+# For information about cache directory tags, see:
+# https://bford.info/cachedir/spec.html
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/README.md b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/README.md
new file mode 100644
index 000000000..b89018ced
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/README.md
@@ -0,0 +1,8 @@
+# pytest cache directory #
+
+This directory contains data from the pytest's cache plugin,
+which provides the `--lf` and `--ff` options, as well as the `cache` fixture.
+
+**Do not** commit this to version control.
+
+See [the docs](https://docs.pytest.org/en/stable/how-to/cache.html) for more information.
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/lastfailed b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/lastfailed
new file mode 100644
index 000000000..ba7b58d20
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/lastfailed
@@ -0,0 +1,3 @@
+{
+ "test_na_ontap_lun_rest.py": true
+} \ No newline at end of file
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/nodeids b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/nodeids
new file mode 100644
index 000000000..ca22cf9ee
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/nodeids
@@ -0,0 +1,6 @@
+[
+ "test_na_ontap_lun.py::TestMyModule::test_create_error_missing_param",
+ "test_na_ontap_lun.py::TestMyModule::test_module_fail_when_required_args_missing",
+ "test_na_ontap_lun_rest.py::TestMyModule::test_create_error_missing_param",
+ "test_na_ontap_lun_rest.py::TestMyModule::test_successful_create_appli"
+] \ No newline at end of file
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/stepwise b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/stepwise
new file mode 100644
index 000000000..0637a088a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/.pytest_cache/v/cache/stepwise
@@ -0,0 +1 @@
+[] \ No newline at end of file
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_active_directory.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_active_directory.py
new file mode 100644
index 000000000..7e108b081
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_active_directory.py
@@ -0,0 +1,311 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP Ansible module na_ontap_active_directory '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ set_module_args, AnsibleExitJson, AnsibleFailJson, patch_ansible, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import zapi_responses, build_zapi_response
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_active_directory \
+ import NetAppOntapActiveDirectory as my_module, main as my_main # module under test
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+# not available on 2.6 anymore
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def default_args(use_rest='never'):
+ return {
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'account_name': 'account_name',
+ 'vserver': 'vserver',
+ 'admin_password': 'admin_password',
+ 'admin_username': 'admin_username',
+ 'use_rest': use_rest
+ }
+
+
+ad_info = {
+ 'attributes-list': {
+ 'active-directory-account-config': {
+ 'account-name': 'account_name',
+ 'domain': 'current.domain',
+ 'organizational-unit': 'current.ou',
+ }
+ }
+}
+
+
+ZRR = zapi_responses(
+ {'ad': build_zapi_response(ad_info, 1)}
+)
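+# ZRR['success'] and ZRR['error'] used below are presumably common entries provided by the
+# zapi_responses() factory, in addition to the module-specific 'ad' record built above.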
+
+SRR = rest_responses({
+ 'ad_1': (200, {"records": [{
+ "fqdn": "server1.com",
+ "name": "account_name",
+ "organizational_unit": "CN=Test",
+ "svm": {"name": "svm1", "uuid": "02c9e252"}
+ }], "num_records": 1}, None),
+ 'ad_2': (200, {"records": [{
+ "fqdn": "server2.com",
+ "name": "account_name",
+ "organizational_unit": "CN=Test",
+ "svm": {"name": "svm1", "uuid": "02c9e252"}
+ }], "num_records": 1}, None)
+})
+
+
+def test_success_create():
+ ''' test create'''
+ args = dict(default_args())
+ args['domain'] = 'some.domain'
+ args['force_account_overwrite'] = True
+ args['organizational_unit'] = 'some.OU'
+ set_module_args(args)
+ register_responses([
+ # list of tuples: (expected ZAPI, response)
+ ('active-directory-account-get-iter', ZRR['success']),
+ ('active-directory-account-create', ZRR['success']),
+ ])
+
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_main()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed']
+
+
+def test_fail_create_zapi_error():
+ ''' test create failure on ZAPI error'''
+ args = dict(default_args())
+ set_module_args(args)
+ register_responses([
+ # list of tuples: (expected ZAPI, response)
+ ('active-directory-account-get-iter', ZRR['success']),
+ ('active-directory-account-create', ZRR['error']),
+ ])
+
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error creating vserver Active Directory account_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg == exc.value.args[0]['msg']
+
+
+def test_success_delete():
+ ''' test delete'''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ register_responses([
+ # list of tuples: (expected ZAPI, response)
+ ('active-directory-account-get-iter', ZRR['ad']),
+ ('active-directory-account-delete', ZRR['success']),
+ ])
+
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed']
+
+
+def test_fail_delete_zapi_error():
+ ''' test delete failure on ZAPI error'''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ register_responses([
+ # list of tuples: (expected ZAPI, response)
+ ('active-directory-account-get-iter', ZRR['ad']),
+ ('active-directory-account-delete', ZRR['error']),
+ ])
+
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error deleting vserver Active Directory account_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg == exc.value.args[0]['msg']
+
+
+def test_success_modify():
+ ''' test modify'''
+ args = dict(default_args())
+ args['domain'] = 'some.other.domain'
+ args['force_account_overwrite'] = True
+ set_module_args(args)
+ register_responses([
+ # list of tuples: (expected ZAPI, response)
+ ('active-directory-account-get-iter', ZRR['ad']),
+ ('active-directory-account-modify', ZRR['success']),
+ ])
+
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed']
+
+
+def test_fail_modify_zapi_error():
+ ''' test modify failure on ZAPI error'''
+ args = dict(default_args())
+ args['domain'] = 'some.other.domain'
+ set_module_args(args)
+ register_responses([
+ # list of tuples: (expected ZAPI, response)
+ ('active-directory-account-get-iter', ZRR['ad']),
+ ('active-directory-account-modify', ZRR['error']),
+ ])
+
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error modifying vserver Active Directory account_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg == exc.value.args[0]['msg']
+
+
+def test_fail_modify_on_ou():
+ ''' test that modifying organizational_unit fails'''
+ args = dict(default_args())
+ args['organizational_unit'] = 'some.other.OU'
+ set_module_args(args)
+ register_responses([
+ # list of tuples: (expected ZAPI, response)
+ ('active-directory-account-get-iter', ZRR['ad']),
+ ])
+
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: organizational_unit cannot be modified; found {'organizational_unit': 'some.other.OU'}."
+ assert msg == exc.value.args[0]['msg']
+
+
+def test_fail_on_get_zapi_error():
+ ''' test get failure on ZAPI error'''
+ args = dict(default_args())
+ set_module_args(args)
+ register_responses([
+ # list of tuples: (expected ZAPI, response)
+ ('active-directory-account-get-iter', ZRR['error']),
+ ])
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error searching for Active Directory account_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ ''' test failure when netapp_lib is missing'''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_has_netapp_lib.return_value = False
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' == exc.value.args[0]['msg']
+
+
+def test_fail_on_rest():
+ ''' test error with rest versions less than 9.12.1'''
+ args = dict(default_args('always'))
+ set_module_args(args)
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0'])
+ ])
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ assert 'Error: REST requires ONTAP 9.12.1 or later' in exc.value.args[0]['msg']
+
+
+def test_success_create_rest():
+ ''' test create'''
+ args = dict(default_args('always'))
+ args['domain'] = 'server1.com'
+ args['force_account_overwrite'] = True
+ args['organizational_unit'] = 'CN=Test'
+ set_module_args(args)
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/active-directory', SRR['empty_records']),
+ ('POST', 'protocols/active-directory', SRR['success']),
+ ])
+
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_main()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed']
+
+
+def test_success_delete_rest():
+ ''' test delete rest'''
+ args = dict(default_args('always'))
+ args['state'] = 'absent'
+ set_module_args(args)
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/active-directory', SRR['ad_1']),
+ ('DELETE', 'protocols/active-directory/02c9e252', SRR['success']),
+ ])
+
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed']
+
+
+def test_success_modify_rest():
+ ''' test modify rest'''
+ args = dict(default_args('always'))
+ args['domain'] = 'some.other.domain'
+ args['force_account_overwrite'] = True
+ set_module_args(args)
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/active-directory', SRR['ad_1']),
+ ('PATCH', 'protocols/active-directory/02c9e252', SRR['success']),
+ ])
+
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/active-directory', SRR['generic_error']),
+ ('POST', 'protocols/active-directory', SRR['generic_error']),
+ ('PATCH', 'protocols/active-directory/02c9e252', SRR['generic_error']),
+ ('DELETE', 'protocols/active-directory/02c9e252', SRR['generic_error'])
+ ])
+ ad_obj = create_module(my_module, default_args('always'))
+ ad_obj.svm_uuid = '02c9e252'
+ assert 'Error searching for Active Directory' in expect_and_capture_ansible_exception(ad_obj.get_active_directory_rest, 'fail')['msg']
+ assert 'Error creating vserver Active Directory' in expect_and_capture_ansible_exception(ad_obj.create_active_directory_rest, 'fail')['msg']
+ assert 'Error modifying vserver Active Directory' in expect_and_capture_ansible_exception(ad_obj.modify_active_directory_rest, 'fail')['msg']
+ assert 'Error deleting vserver Active Directory' in expect_and_capture_ansible_exception(ad_obj.delete_active_directory_rest, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_active_directory_domain_controllers.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_active_directory_domain_controllers.py
new file mode 100644
index 000000000..cedbe0519
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_active_directory_domain_controllers.py
@@ -0,0 +1,177 @@
+# Copyright: NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_active_directory_preferred_domain_controllers """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import create_and_apply,\
+ patch_ansible, call_main
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_active_directory_domain_controllers \
+ import NetAppOntapActiveDirectoryDC as my_module, main as my_main # module under test
+
+# skip tests if netapp_lib is not available
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'DC_record': (
+ 200,
+ {
+ "records": [
+ {
+ "fqdn": "example.com",
+ "server_ip": "10.10.10.10",
+ 'svm': {"uuid": "3d52ad89-c278-11ed-a7b0-005056b3ed56"},
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ 'svm_record': (
+ 200,
+ {
+ "records": [
+ {
+ "uuid": "3d52ad89-c278-11ed-a7b0-005056b3ed56",
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
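+# 'is_rest_9_12_0', 'empty_records', 'empty_good' and 'generic_error' used below presumably
+# come from the common entries added by the rest_responses() factory.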
+
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'ansible',
+ 'fqdn': 'example.com',
+ 'server_ip': '10.10.10.10'
+}
+
+
+def test_rest_error_get_svm():
+ '''Test error rest get svm'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ])
+ error = call_main(my_main, ARGS_REST, fail=True)['msg']
+ msg = "Error fetching vserver ansible: calling: svm/svms: got Expected error."
+ assert msg in error
+
+
+def test_rest_error_get():
+ '''Test error rest get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['generic_error']),
+ ])
+ error = call_main(my_main, ARGS_REST, fail=True)['msg']
+ msg = "Error on fetching Active Directory preferred DC configuration of an SVM:"
+ assert msg in error
+
+
+def test_rest_error_create_active_directory_preferred_domain_controllers():
+ '''Test error rest create active_directory preferred domain_controllers'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['empty_records']),
+ ('POST', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['generic_error']),
+ ])
+ error = call_main(my_main, ARGS_REST, fail=True)['msg']
+ msg = "Error on adding Active Directory preferred DC configuration to an SVM:"
+ assert msg in error
+
+
+def test_rest_create_active_directory_preferred_domain_controllers():
+ '''Test rest create active_directory preferred domain_controllers'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['empty_records']),
+ ('POST', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['empty_good']),
+ ])
+ module_args = {
+ 'fqdn': 'example.com',
+ 'server_ip': '10.10.10.10'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_rest_delete_active_directory_preferred_domain_controllers():
+ '''Test rest delete active_directory preferred domain_controllers'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['DC_record']),
+ ('DELETE', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers/example.com/10.10.10.10', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_error_delete_active_directory_preferred_domain_controllers():
+ '''Test error rest delete active_directory preferred domain_controllers'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['DC_record']),
+ ('DELETE', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers/example.com/10.10.10.10',
+ SRR['generic_error']),
+ ])
+ module_args = {
+ 'fqdn': 'example.com',
+ 'server_ip': '10.10.10.10',
+ 'state': 'absent'
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on deleting Active Directory preferred DC configuration of an SVM:"
+ assert msg in error
+
+
+def test_create_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['DC_record']),
+ ])
+ module_args = {
+ 'state': 'present',
+ 'fqdn': 'example.com',
+ 'server_ip': '10.10.10.10'
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_delete_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_0']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'protocols/active-directory/3d52ad89-c278-11ed-a7b0-005056b3ed56/preferred-domain-controllers', SRR['empty_records']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate.py
new file mode 100644
index 000000000..c93260dcf
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate.py
@@ -0,0 +1,627 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_aggregate """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses, build_zapi_error
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_aggregate \
+ import NetAppOntapAggregate as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+AGGR_NAME = 'aggr_name'
+OS_NAME = 'abc'
+
+aggr_info = {'num-records': 3,
+ 'attributes-list':
+ {'aggr-attributes':
+ {'aggregate-name': AGGR_NAME,
+ 'aggr-raid-attributes': {
+ 'state': 'online',
+ 'disk-count': '4',
+ 'encrypt-with-aggr-key': 'true'},
+ 'aggr-snaplock-attributes': {'snaplock-type': 'snap_t'}}
+ },
+ }
+
+object_store_info = {'num-records': 1,
+ 'attributes-list':
+ {'object-store-information': {'object-store-name': OS_NAME}}
+ }
+
+disk_info = {'num-records': 1,
+ 'attributes-list': [
+ {'disk-info':
+ {'disk-name': '1',
+ 'disk-raid-info':
+ {'disk-aggregate-info':
+ {'plex-name': 'plex0'}
+ }}},
+ {'disk-info':
+ {'disk-name': '2',
+ 'disk-raid-info':
+ {'disk-aggregate-info':
+ {'plex-name': 'plex0'}
+ }}},
+ {'disk-info':
+ {'disk-name': '3',
+ 'disk-raid-info':
+ {'disk-aggregate-info':
+ {'plex-name': 'plexM'}
+ }}},
+ {'disk-info':
+ {'disk-name': '4',
+ 'disk-raid-info':
+ {'disk-aggregate-info':
+ {'plex-name': 'plexM'}
+ }}},
+ ]}
+
+ZRR = zapi_responses({
+ 'aggr_info': build_zapi_response(aggr_info),
+ 'object_store_info': build_zapi_response(object_store_info),
+ 'disk_info': build_zapi_response(disk_info),
+ 'error_disk_add': build_zapi_error(13003, 'disk add operation is in progress'),
+})
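+# 'error_disk_add' builds ZAPI error 13003 ('disk add operation is in progress'); the module
+# appears to retry aggr-offline on this error, as exercised by test_disks_add_and_offline below.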
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': AGGR_NAME,
+ 'use_rest': 'never',
+ 'feature_flags': {'no_cserver_ems': True}
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ error = create_module(my_module, fail=True)['msg']
+ print('Info: %s' % error)
+ assert 'missing required arguments:' in error
+ assert 'name' in error
+
+
+def test_create():
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-create', ZRR['empty']),
+ ('aggr-get-iter', ZRR['empty']),
+ ])
+ module_args = {
+ 'disk_type': 'ATA',
+ 'raid_type': 'raid_dp',
+ 'snaplock_type': 'non_snaplock',
+ # 'spare_pool': 'Pool0',
+ 'disk_count': 4,
+ 'raid_size': 5,
+ 'disk_size': 10,
+ # 'disk_size_with_unit': 'dsize_unit',
+ 'is_mirrored': True,
+ 'ignore_pool_checks': True,
+ 'encryption': True,
+ 'nodes': ['node1', 'node2']
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('aggr-destroy', ZRR['empty'])
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'disk_count': 3
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_with_spare_pool():
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-create', ZRR['empty']),
+ ('aggr-get-iter', ZRR['empty']),
+ ])
+ module_args = {
+ 'disk_type': 'ATA',
+ 'raid_type': 'raid_dp',
+ 'snaplock_type': 'non_snaplock',
+ 'spare_pool': 'Pool0',
+ 'disk_count': 2,
+ 'raid_size': 5,
+ 'disk_size_with_unit': '10m',
+ # 'disk_size_with_unit': 'dsize_unit',
+ 'ignore_pool_checks': True,
+ 'encryption': True,
+ 'nodes': ['node1', 'node2']
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_with_disks():
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-create', ZRR['empty']),
+ ('aggr-get-iter', ZRR['empty']),
+ ])
+ module_args = {
+ 'disk_type': 'ATA',
+ 'raid_type': 'raid_dp',
+ 'snaplock_type': 'non_snaplock',
+ 'disks': [1, 2],
+ 'mirror_disks': [11, 12],
+ 'raid_size': 5,
+ 'disk_size_with_unit': '10m',
+ 'ignore_pool_checks': True,
+ 'encryption': True,
+ 'nodes': ['node1', 'node2']
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_create_wait_for_completion(mock_time):
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-create', ZRR['empty']),
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ])
+ module_args = {
+ 'disk_count': '2',
+ 'is_mirrored': 'true',
+ 'wait_for_online': 'true'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_with_object_store():
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-create', ZRR['empty']),
+ ('aggr-get-iter', ZRR['empty']),
+ ('aggr-object-store-attach', ZRR['empty']),
+ ])
+ module_args = {
+ 'disk_class': 'capacity',
+ 'disk_count': '2',
+ 'is_mirrored': 'true',
+ 'object_store_name': 'abc',
+ 'allow_flexgroups': True
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_is_mirrored():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ])
+ module_args = {
+ 'disk_count': '4',
+ 'is_mirrored': 'true',
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_disks_list():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ])
+ module_args = {
+ 'disks': ['1', '2'],
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_mirror_disks():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ])
+ module_args = {
+ 'disks': ['1', '2'],
+ 'mirror_disks': ['3', '4']
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_spare_pool():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ])
+ module_args = {
+ 'disk_count': '4',
+ 'spare_pool': 'Pool1'
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_modify_encryption():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ])
+ module_args = {
+ 'encryption': False
+ }
+ exc = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = 'Error: modifying encryption is not supported with ZAPI.'
+ assert msg in exc['msg']
+
+
+def test_rename():
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']), # target does not exist
+ ('aggr-get-iter', ZRR['aggr_info']), # from exists
+ ('aggr-rename', ZRR['empty']),
+ ])
+ module_args = {
+ 'from_name': 'test_name2'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rename_error_no_from():
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']), # target does not exist
+ ('aggr-get-iter', ZRR['empty']), # from does not exist
+ ])
+ module_args = {
+ 'from_name': 'test_name2'
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = 'Error renaming aggregate %s: no aggregate with from_name %s.' % (AGGR_NAME, module_args['from_name'])
+ assert msg in exception['msg']
+
+
+def test_rename_with_add_object_store(): # TODO:
+ register_responses([
+ ('aggr-get-iter', ZRR['empty']), # target does not exist
+ ('aggr-get-iter', ZRR['aggr_info']), # from exists
+ ('aggr-object-store-get-iter', ZRR['empty']), # from does not have an OS
+ ('aggr-rename', ZRR['empty']),
+ ('aggr-object-store-attach', ZRR['empty']),
+ ])
+ module_args = {
+ 'from_name': 'test_name2',
+ 'object_store_name': 'abc',
+ 'allow_flexgroups': False
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_object_store_present():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('aggr-object-store-get-iter', ZRR['object_store_info']),
+ ])
+ module_args = {
+ 'object_store_name': 'abc'
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_object_store_create():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('aggr-object-store-get-iter', ZRR['empty']), # object_store is not attached
+ ('aggr-object-store-attach', ZRR['empty']),
+ ])
+ module_args = {
+ 'object_store_name': 'abc',
+ 'allow_flexgroups': True
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_object_store_modify():
+ ''' not supported '''
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('aggr-object-store-get-iter', ZRR['object_store_info']),
+ ])
+ module_args = {
+ 'object_store_name': 'def'
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = 'Error: object store %s is already associated with aggregate %s.' % (OS_NAME, AGGR_NAME)
+ assert msg in exception['msg']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('aggr-get-iter', ZRR['error']),
+ ('aggr-online', ZRR['error']),
+ ('aggr-offline', ZRR['error']),
+ ('aggr-create', ZRR['error']),
+ ('aggr-destroy', ZRR['error']),
+ ('aggr-rename', ZRR['error']),
+ ('aggr-get-iter', ZRR['error']),
+ ])
+ module_args = {
+ 'service_state': 'online',
+ 'unmount_volumes': 'True',
+ 'from_name': 'test_name2',
+ }
+
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.aggr_get_iter, 'fail', module_args.get('name'))['msg']
+ assert 'Error getting aggregate: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.aggregate_online, 'fail')['msg']
+ assert 'Error changing the state of aggregate' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.aggregate_offline, 'fail')['msg']
+ assert 'Error changing the state of aggregate' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.create_aggr, 'fail')['msg']
+ assert 'Error provisioning aggregate' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_aggr, 'fail')['msg']
+ assert 'Error removing aggregate' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.rename_aggregate, 'fail')['msg']
+ assert 'Error renaming aggregate' in error
+
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ error = expect_and_capture_ansible_exception(my_obj.apply, 'fail')['msg']
+ assert '12345:synthetic error for UT purpose' in error
+
+
+def test_disks_bad_mapping():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ])
+ module_args = {
+ 'disks': ['0'],
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = "Error mapping disks for aggregate %s: cannot match disks with current aggregate disks." % AGGR_NAME
+ assert exception['msg'].startswith(msg)
+
+
+def test_disks_overlapping_mirror():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ])
+ module_args = {
+ 'disks': ['1', '2', '3'],
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = "Error mapping disks for aggregate %s: found overlapping plexes:" % AGGR_NAME
+ assert exception['msg'].startswith(msg)
+
+
+def test_disks_removing_disk():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ])
+ module_args = {
+ 'disks': ['1'],
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = "Error removing disks is not supported. Aggregate %s: these disks cannot be removed: ['2']." % AGGR_NAME
+ assert exception['msg'].startswith(msg)
+
+
+def test_disks_removing_mirror_disk():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ])
+ module_args = {
+ 'disks': ['1', '2'],
+ 'mirror_disks': ['4', '6']
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = "Error removing disks is not supported. Aggregate %s: these disks cannot be removed: ['3']." % AGGR_NAME
+ assert exception['msg'].startswith(msg)
+
+
+def test_disks_add():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ('aggr-add', ZRR['empty']),
+ ])
+ module_args = {
+ 'disks': ['1', '2', '5'],
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_disks_add_and_offline():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ('aggr-add', ZRR['empty']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['success']),
+ # second run: fail after the maximum number of offline retries is exhausted.
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ('aggr-add', ZRR['empty']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add']),
+ ('aggr-offline', ZRR['error_disk_add'])
+ ])
+ module_args = {
+ 'disks': ['1', '2', '5'], 'service_state': 'offline'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert 'disk add operation is in progres' in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_mirror_disks_add():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ('aggr-add', ZRR['empty']),
+ ])
+ module_args = {
+ 'disks': ['1', '2', '5'],
+ 'mirror_disks': ['3', '4', '6']
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_mirror_disks_add_unbalanced():
+ register_responses([
+ ('aggr-get-iter', ZRR['aggr_info']),
+ ('storage-disk-get-iter', ZRR['disk_info']),
+ ])
+ module_args = {
+ 'disks': ['1', '2'],
+ 'mirror_disks': ['3', '4', '6']
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = "Error cannot add mirror disks ['6'] without adding disks for aggregate %s." % AGGR_NAME
+ assert exception['msg'].startswith(msg)
+
+
+def test_map_plex_to_primary_and_mirror_error_overlap():
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ kwargs = {
+ 'plex_disks': {'plex1': [1, 2, 3], 'plex2': [4, 5, 6]},
+ 'disks': [1, 4, 5],
+ 'mirror_disks': []
+ }
+ error = expect_and_capture_ansible_exception(my_obj.map_plex_to_primary_and_mirror, 'fail', **kwargs)['msg']
+ msg = "Error mapping disks for aggregate aggr_name: found overlapping plexes:"
+ assert error.startswith(msg)
+
+
+def test_map_plex_to_primary_and_mirror_error_overlap_mirror():
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ kwargs = {
+ 'plex_disks': {'plex1': [1, 2, 3], 'plex2': [4, 5, 6]},
+ 'disks': [1, 4, 5],
+ 'mirror_disks': [1, 4, 5]
+ }
+ error = expect_and_capture_ansible_exception(my_obj.map_plex_to_primary_and_mirror, 'fail', **kwargs)['msg']
+ msg = "Error mapping disks for aggregate aggr_name: found overlapping mirror plexes:"
+ assert error.startswith(msg)
+
+
+def test_map_plex_to_primary_and_mirror_error_no_match():
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ kwargs = {
+ 'plex_disks': {'plex1': [1, 2, 3], 'plex2': [4, 5, 6]},
+ 'disks': [7, 8, 9],
+ 'mirror_disks': [10, 11, 12]
+ }
+ error = expect_and_capture_ansible_exception(my_obj.map_plex_to_primary_and_mirror, 'fail', **kwargs)['msg']
+ msg = ("Error mapping disks for aggregate aggr_name: cannot match disks with current aggregate disks, "
+ "and cannot match mirror_disks with current aggregate disks.")
+ assert error.startswith(msg)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_missing_netapp_lib(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg == create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_disk_get_iter_error():
+ register_responses([
+ ('storage-disk-get-iter', ZRR['error']),
+ ])
+ msg = 'Error getting disks: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg == expect_and_capture_ansible_exception(create_module(my_module, DEFAULT_ARGS).disk_get_iter, 'fail', 'name')['msg']
+
+
+def test_object_store_get_iter_error():
+ register_responses([
+ ('aggr-object-store-get-iter', ZRR['error']),
+ ])
+ msg = 'Error getting object store: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg == expect_and_capture_ansible_exception(create_module(my_module, DEFAULT_ARGS).object_store_get_iter, 'fail', 'name')['msg']
+
+
+def test_attach_object_store_to_aggr_error():
+ register_responses([
+ ('aggr-object-store-attach', ZRR['error']),
+ ])
+ module_args = {
+ 'object_store_name': 'os12',
+ }
+ msg = 'Error attaching object store os12 to aggregate aggr_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg == expect_and_capture_ansible_exception(create_module(my_module, DEFAULT_ARGS, module_args).attach_object_store_to_aggr, 'fail')['msg']
+
+
+def test_add_disks_all_options_class():
+ register_responses([
+ ('aggr-add', ZRR['empty']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['ignore_pool_checks'] = True
+ my_obj.parameters['disk_class'] = 'performance'
+ assert my_obj.add_disks(count=2, disks=['1', '2'], disk_size=1, disk_size_with_unit='12GB') is None
+
+
+def test_add_disks_all_options_type():
+ register_responses([
+ ('aggr-add', ZRR['empty']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['ignore_pool_checks'] = True
+ my_obj.parameters['disk_type'] = 'SSD'
+ assert my_obj.add_disks(count=2, disks=['1', '2'], disk_size=1, disk_size_with_unit='12GB') is None
+
+
+def test_add_disks_error():
+ register_responses([
+ ('aggr-add', ZRR['error']),
+ ])
+ msg = 'Error adding additional disks to aggregate aggr_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg == expect_and_capture_ansible_exception(create_module(my_module, DEFAULT_ARGS).add_disks, 'fail')['msg']
+
+
+def test_modify_aggr_offline():
+ register_responses([
+ ('aggr-offline', ZRR['empty']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert my_obj.modify_aggr({'service_state': 'offline'}) is None
+
+
+def test_modify_aggr_online():
+ register_responses([
+ ('aggr-online', ZRR['empty']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert my_obj.modify_aggr({'service_state': 'online'}) is None
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate_rest.py
new file mode 100644
index 000000000..1fc6bfbf2
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_aggregate_rest.py
@@ -0,0 +1,616 @@
+
+# (c) 2022-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_aggregate when using REST """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_aggregate \
+ import NetAppOntapAggregate as my_module, main as my_main # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+# REST API canned responses when mocking send_request.
+# The rest_factory provides default responses shared across testcases.
+SRR = rest_responses({
+ # module specific responses
+ 'one_record': (200, {'records': [
+ {'uuid': 'ansible', '_tags': ['resource:cloud', 'main:aggr'],
+ 'block_storage': {'primary': {'disk_count': 5}},
+ 'state': 'online', 'snaplock_type': 'snap'}
+ ]}, None),
+ 'two_records': (200, {'records': [
+ {'uuid': 'ansible',
+ 'block_storage': {'primary': {'disk_count': 5}},
+ 'state': 'online', 'snaplock_type': 'snap'},
+ {'uuid': 'ansible',
+ 'block_storage': {'primary': {'disk_count': 5}},
+ 'state': 'online', 'snaplock_type': 'snap'},
+ ]}, None),
+ 'no_uuid': (200, {'records': [
+ {'block_storage': {'primary': {'disk_count': 5}},
+ 'state': 'online', 'snaplock_type': 'snap'},
+ ]}, None),
+})
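+# Each module-specific entry above is a (status_code, json_body, error) triple;
+# the shared keys used in the tests below ('is_rest', 'generic_error', 'empty_records', ...)
+# come from the rest_factory defaults mentioned above.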
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'aggr_name'
+}
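+# Minimal connection and identity options; individual tests layer additional
+# module_args on top of these through create_module/create_and_apply below.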
+
+
+def test_validate_options():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ], 'test_validate_options')
+ # no error!
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert my_obj.validate_options() is None
+
+ my_obj.parameters['nodes'] = [1, 2]
+
+ msg = 'Error when validating options: only one node can be specified when using rest'
+ assert msg in expect_and_capture_ansible_exception(my_obj.validate_options, 'fail')['msg']
+
+ my_obj.parameters['disk_count'] = 7
+ my_obj.parameters.pop('nodes')
+ msg = 'Error when validating options: nodes is required when disk_count is present.'
+ assert msg in expect_and_capture_ansible_exception(my_obj.validate_options, 'fail')['msg']
+
+ my_obj.use_rest = False
+ my_obj.parameters['mirror_disks'] = [1, 2]
+ msg = 'Error when validating options: mirror_disks require disks options to be set.'
+ assert msg in expect_and_capture_ansible_exception(my_obj.validate_options, 'fail')['msg']
+
+
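+# The conversions asserted below follow two conventions (inferred from the assertions
+# themselves): 'disk_size' is expressed in 4 KiB blocks, while 'disk_size_with_unit'
+# is a byte count with an optional, case-insensitive binary suffix (K, gb, ...).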
+def test_get_disk_size():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+
+ my_obj.parameters['disk_size'] = 1
+ assert my_obj.get_disk_size() == 4096
+ my_obj.parameters['disk_size'] = 1000
+ assert my_obj.get_disk_size() == 4096000
+
+ my_obj.parameters.pop('disk_size')
+ my_obj.parameters['disk_size_with_unit'] = '1567'
+ assert my_obj.get_disk_size() == 1567
+ my_obj.parameters['disk_size_with_unit'] = '1567K'
+ assert my_obj.get_disk_size() == 1567 * 1024
+ my_obj.parameters['disk_size_with_unit'] = '1567gb'
+ assert my_obj.get_disk_size() == 1567 * 1024 * 1024 * 1024
+ my_obj.parameters['disk_size_with_unit'] = '15.67gb'
+ assert my_obj.get_disk_size() == int(15.67 * 1024 * 1024 * 1024)
+
+ my_obj.parameters['disk_size_with_unit'] = '1567rb'
+ error = expect_and_capture_ansible_exception(my_obj.get_disk_size, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: unexpected unit in disk_size_with_unit: 1567rb' == error
+
+ my_obj.parameters['disk_size_with_unit'] = 'error'
+ error = expect_and_capture_ansible_exception(my_obj.get_disk_size, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: unexpected value in disk_size_with_unit: error' == error
+
+
+def test_get_aggr_rest_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['generic_error'])
+ ])
+ error = expect_and_capture_ansible_exception(create_module(my_module, DEFAULT_ARGS).get_aggr_rest, 'fail', 'aggr1')['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to get aggregate aggr1: calling: storage/aggregates: got Expected error.' == error
+
+
+def test_get_aggr_rest_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ])
+ assert create_module(my_module, DEFAULT_ARGS).get_aggr_rest(None) is None
+
+
+def test_get_aggr_rest_one_record():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['one_record'])
+ ])
+ assert create_module(my_module, DEFAULT_ARGS).get_aggr_rest('aggr1') is not None
+
+
+def test_get_aggr_rest_not_found():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']),
+ ])
+ assert create_module(my_module, DEFAULT_ARGS).get_aggr_rest('aggr1') is None
+
+
+def test_create_aggr():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'storage/aggregates', SRR['empty_good'])
+ ])
+ assert create_module(my_module, DEFAULT_ARGS).create_aggr_rest() is None
+ assert get_mock_record().is_record_in_json({'name': 'aggr_name'}, 'POST', 'storage/aggregates')
+
+
+def test_aggr_tags():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_13_1']),
+ ('GET', 'storage/aggregates', SRR['zero_records']),
+ ('POST', 'storage/aggregates', SRR['empty_good']),
+ # idempotent check
+ ('GET', 'cluster', SRR['is_rest_9_13_1']),
+ ('GET', 'storage/aggregates', SRR['one_record']),
+ # modify tags
+ ('GET', 'cluster', SRR['is_rest_9_13_1']),
+ ('GET', 'storage/aggregates', SRR['one_record']),
+ ('PATCH', 'storage/aggregates/ansible', SRR['success'])
+ ])
+ args = {'tags': ['resource:cloud', 'main:aggr']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'tags': ['main:aggr']})['changed']
+
+
+def test_create_aggr_all_options():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'storage/aggregates', SRR['empty_good'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['disk_class'] = 'capacity'
+ my_obj.parameters['disk_count'] = 12
+ my_obj.parameters['disk_size_with_unit'] = '1567gb'
+ my_obj.parameters['is_mirrored'] = True
+ my_obj.parameters['nodes'] = ['node1']
+ my_obj.parameters['raid_size'] = 4
+ my_obj.parameters['raid_type'] = 'raid5'
+ my_obj.parameters['encryption'] = True
+ my_obj.parameters['snaplock_type'] = 'snap'
+
+ assert my_obj.create_aggr_rest() is None
+ assert get_mock_record().is_record_in_json(
+ {'block_storage': {'primary': {'disk_class': 'capacity', 'disk_count': 12, 'raid_size': 4, 'raid_type': 'raid5'}, 'mirror': {'enabled': True}}},
+ 'POST', 'storage/aggregates')
+
+
+def test_create_aggr_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'storage/aggregates', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['disk_count'] = 12
+ my_obj.parameters['disk_size_with_unit'] = '1567gb'
+ my_obj.parameters['is_mirrored'] = False
+ my_obj.parameters['nodes'] = ['node1']
+ my_obj.parameters['raid_size'] = 4
+ my_obj.parameters['raid_type'] = 'raid5'
+ my_obj.parameters['encryption'] = True
+
+ error = expect_and_capture_ansible_exception(my_obj.create_aggr_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to create aggregate: calling: storage/aggregates: got Expected error.' == error
+
+
+def test_delete_aggr():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('DELETE', 'storage/aggregates/aggr_uuid', SRR['empty_good'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.uuid = 'aggr_uuid'
+ assert my_obj.delete_aggr_rest() is None
+
+
+def test_delete_aggr_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('DELETE', 'storage/aggregates/aggr_uuid', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.parameters['disk_size_with_unit'] = '1567gb'
+ my_obj.parameters['is_mirrored'] = False
+ my_obj.parameters['nodes'] = ['node1']
+ my_obj.parameters['raid_size'] = 4
+ my_obj.parameters['raid_type'] = 'raid5'
+ my_obj.parameters['encryption'] = True
+ my_obj.uuid = 'aggr_uuid'
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_aggr_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to delete aggregate: calling: storage/aggregates/aggr_uuid: got Expected error.' == error
+
+
+def test_patch_aggr():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/aggregates/aggr_uuid', SRR['empty_good'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = 'aggr_uuid'
+ my_obj.patch_aggr_rest('act on', {'key': 'value'})
+ assert get_mock_record().is_record_in_json({'key': 'value'}, 'PATCH', 'storage/aggregates/aggr_uuid')
+
+
+def test_patch_aggr_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/aggregates/aggr_uuid', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = 'aggr_uuid'
+
+ error = expect_and_capture_ansible_exception(my_obj.patch_aggr_rest, 'fail', 'act on', {'key': 'value'})['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to act on aggregate: calling: storage/aggregates/aggr_uuid: got Expected error.' == error
+
+
+def test_set_disk_count():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ current = {'disk_count': 2}
+ modify = {'disk_count': 5}
+ my_obj.set_disk_count(current, modify)
+ assert modify['disk_count'] == 3
+
+
+def test_set_disk_count_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+
+ current = {'disk_count': 9}
+ modify = {'disk_count': 5}
+ error = expect_and_capture_ansible_exception(my_obj.set_disk_count, 'fail', current, modify)['msg']
+ print('Info: %s' % error)
+ assert 'Error: specified disk_count is less than current disk_count. Only adding disks is allowed.' == error
+
+
+def test_add_disks():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/aggregates/aggr_uuid', SRR['empty_good'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['disk_class'] = 'performance'
+ my_obj.parameters['disk_count'] = 12
+ my_obj.uuid = 'aggr_uuid'
+ my_obj.add_disks_rest(count=2)
+ assert get_mock_record().is_record_in_json({'block_storage': {'primary': {'disk_count': 12}}}, 'PATCH', 'storage/aggregates/aggr_uuid')
+
+
+def test_add_disks_error_local():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = 'aggr_uuid'
+
+ error = expect_and_capture_ansible_exception(my_obj.add_disks_rest, 'fail', disks=[1, 2])['msg']
+ print('Info: %s' % error)
+ assert 'Error: disks or mirror disks are mot supported with rest: [1, 2], None.' == error
+
+
+def test_add_disks_error_remote():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/aggregates/aggr_uuid', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['disk_count'] = 12
+ my_obj.uuid = 'aggr_uuid'
+
+ error = expect_and_capture_ansible_exception(my_obj.add_disks_rest, 'fail', count=2)['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to increase disk count for aggregate: calling: storage/aggregates/aggr_uuid: got Expected error.' == error
+
+
+def test_rename_aggr():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/aggregates/aggr_uuid', SRR['empty_good'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = 'aggr_uuid'
+ my_obj.rename_aggr_rest()
+ assert get_mock_record().is_record_in_json({'name': 'aggr_name'}, 'PATCH', 'storage/aggregates/aggr_uuid')
+
+
+def test_offline_online_aggr_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/aggregates/aggr_uuid', SRR['generic_error']),
+ ('PATCH', 'storage/aggregates/aggr_uuid', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = 'aggr_uuid'
+ error = 'Error: failed to make service state online for aggregate'
+ assert error in expect_and_capture_ansible_exception(my_obj.aggregate_online, 'fail')['msg']
+ error = 'Error: failed to make service state offline for aggregate'
+ assert error in expect_and_capture_ansible_exception(my_obj.aggregate_offline, 'fail')['msg']
+
+
+def test_rename_aggr_error_remote():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/aggregates/aggr_uuid', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = 'aggr_uuid'
+
+ error = expect_and_capture_ansible_exception(my_obj.rename_aggr_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to rename aggregate: calling: storage/aggregates/aggr_uuid: got Expected error.' == error
+
+
+def test_get_object_store():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates/aggr_uuid/cloud-stores', SRR['one_record'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = 'aggr_uuid'
+ record = my_obj.get_object_store_rest()
+ assert record
+
+
+def test_get_object_store_error_remote():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates/aggr_uuid/cloud-stores', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = 'aggr_uuid'
+
+ error = expect_and_capture_ansible_exception(my_obj.get_object_store_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to get cloud stores for aggregate: calling: storage/aggregates/aggr_uuid/cloud-stores: got Expected error.' == error
+
+
+def test_get_cloud_target_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cloud/targets', SRR['one_record'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['object_store_name'] = 'os12'
+ my_obj.uuid = 'aggr_uuid'
+ record = my_obj.get_cloud_target_uuid_rest()
+ assert record
+
+
+def test_get_cloud_target_uuid_error_remote():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cloud/targets', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['object_store_name'] = 'os12'
+ my_obj.uuid = 'aggr_uuid'
+
+ error = expect_and_capture_ansible_exception(my_obj.get_cloud_target_uuid_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to find cloud store with name os12: calling: cloud/targets: got Expected error.' == error
+
+
+def test_attach_object_store_to_aggr():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cloud/targets', SRR['one_record']), # get object store UUID
+ ('POST', 'storage/aggregates/aggr_uuid/cloud-stores', SRR['empty_good']) # attach (POST)
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['object_store_name'] = 'os12'
+ my_obj.parameters['allow_flexgroups'] = True
+ my_obj.uuid = 'aggr_uuid'
+ assert my_obj.attach_object_store_to_aggr_rest() == {}
+
+
+def test_attach_object_store_to_aggr_error_remote():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cloud/targets', SRR['one_record']), # get object store UUID
+ ('POST', 'storage/aggregates/aggr_uuid/cloud-stores', SRR['generic_error']) # attach (POST)
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['object_store_name'] = 'os12'
+ my_obj.uuid = 'aggr_uuid'
+
+ error = expect_and_capture_ansible_exception(my_obj.attach_object_store_to_aggr_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: failed to attach cloud store with name os12: calling: storage/aggregates/aggr_uuid/cloud-stores: got Expected error.' == error
+
+
+def test_apply_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get
+ ('POST', 'storage/aggregates', SRR['empty_good']), # create (POST)
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+ assert get_mock_record().is_record_in_json({'name': 'aggr_name'}, 'POST', 'storage/aggregates')
+
+
+def test_apply_create_and_modify_service_state():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get
+ ('POST', 'storage/aggregates', SRR['empty_good']), # create (POST)
+ ('PATCH', 'storage/aggregates', SRR['success']), # modify service state
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'service_state': 'offline'})['changed']
+ assert get_mock_record().is_record_in_json({'name': 'aggr_name'}, 'POST', 'storage/aggregates')
+
+
+def test_apply_create_fail_to_read_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get
+ ('POST', 'storage/aggregates', SRR['two_records']), # create (POST)
+ ])
+ msg = 'Error: failed to parse create aggregate response: calling: storage/aggregates: unexpected response'
+ assert msg in create_and_apply(my_module, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_apply_create_fail_to_read_uuid_key_missing():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get
+ ('POST', 'storage/aggregates', SRR['no_uuid']), # create (POST)
+ ])
+ msg = 'Error: failed to parse create aggregate response: uuid key not present in'
+ assert msg in create_and_apply(my_module, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_apply_create_with_object_store():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get
+ ('POST', 'storage/aggregates', SRR['one_record']), # create (POST)
+ ('GET', 'cloud/targets', SRR['one_record']), # get object store uuid
+ ('POST', 'storage/aggregates/ansible/cloud-stores', SRR['empty_good']), # attach (POST)
+ ])
+ module_args = {
+ 'object_store_name': 'os12'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert get_mock_record().is_record_in_json({'name': 'aggr_name'}, 'POST', 'storage/aggregates')
+ assert get_mock_record().is_record_in_json({'target': {'uuid': 'ansible'}}, 'POST', 'storage/aggregates/ansible/cloud-stores')
+
+
+def test_apply_create_with_object_store_missing_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get
+ ('POST', 'storage/aggregates', SRR['empty_good']), # create (POST)
+ ])
+ module_args = {
+ 'object_store_name': 'os12'
+ }
+ msg = 'Error: cannot attach cloud store with name os12: aggregate UUID is not set.'
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+ assert get_mock_record().is_record_in_json({'name': 'aggr_name'}, 'POST', 'storage/aggregates')
+
+
+def test_apply_create_check_mode():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, check_mode=True)['changed']
+
+
+def test_apply_add_disks():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['one_record']), # get
+ ('PATCH', 'storage/aggregates/ansible', SRR['empty_good']), # patch (add disks)
+ ])
+ module_args = {
+ 'disk_count': 12,
+ 'nodes': 'node1'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert get_mock_record().is_record_in_json({'block_storage': {'primary': {'disk_count': 12}}}, 'PATCH', 'storage/aggregates/ansible')
+
+
+def test_apply_add_object_store():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['one_record']), # get
+ ('GET', 'storage/aggregates/ansible/cloud-stores', SRR['empty_records']), # get aggr cloud store
+ ('GET', 'cloud/targets', SRR['one_record']), # get object store uuid
+ ('POST', 'storage/aggregates/ansible/cloud-stores', SRR['empty_good']), # attach
+ ])
+ module_args = {
+ 'object_store_name': 'os12',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert get_mock_record().is_record_in_json({'target': {'uuid': 'ansible'}}, 'POST', 'storage/aggregates/ansible/cloud-stores')
+
+
+def test_apply_rename():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get aggr
+ ('GET', 'storage/aggregates', SRR['one_record']), # get from_aggr
+ ('PATCH', 'storage/aggregates/ansible', SRR['empty_good']), # patch (rename)
+ ])
+ module_args = {
+ 'from_name': 'old_aggr',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert get_mock_record().is_record_in_json({'name': 'aggr_name'}, 'PATCH', 'storage/aggregates/ansible')
+
+
+def test_apply_delete():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['one_record']), # get
+ ('DELETE', 'storage/aggregates/ansible', SRR['empty_good']), # delete
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'disk_count': 4
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_get_aggr_actions_error_service_state_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ error = 'Error: Minimum version of ONTAP for service_state is (9, 11, 1)'
+ assert error in create_module(my_module, DEFAULT_ARGS, {'service_state': 'online', 'use_rest': 'always'}, fail=True)['msg']
+
+
+def test_get_aggr_actions_error_snaplock():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['one_record']), # get
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['snaplock_type'] = 'enterprise'
+
+ error = expect_and_capture_ansible_exception(my_obj.get_aggr_actions, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error: snaplock_type is not modifiable. Cannot change to: enterprise.' == error
+
+
+def test_main_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates', SRR['empty_records']), # get
+ ('POST', 'storage/aggregates', SRR['empty_good']), # create
+ ])
+ set_module_args(DEFAULT_ARGS)
+
+ assert expect_and_capture_ansible_exception(my_main, 'exit')['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_autosupport.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_autosupport.py
new file mode 100644
index 000000000..c971520f1
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_autosupport.py
@@ -0,0 +1,264 @@
+# (c) 2018-2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP autosupport Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_warning_was_raised, call_main, create_module, patch_ansible, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_autosupport \
+ import NetAppONTAPasup as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'node_name': 'node1',
+ 'retry_count': '16',
+ 'transport': 'http',
+ 'ondemand_enabled': 'true'
+}
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'one_asup_record': (200, {
+ "records": [{
+ 'node': 'node1',
+ 'state': True,
+ 'from': 'Postmaster',
+ 'support': True,
+ 'transport': 'http',
+ 'url': 'support.netapp.com/asupprod/post/1.0/postAsup',
+ 'proxy_url': 'username1:********@host.com:8080',
+ 'hostname_subj': True,
+ 'nht': False,
+ 'perf': True,
+ 'retry_count': 16,
+ 'reminder': True,
+ 'max_http_size': 10485760,
+ 'max_smtp_size': 5242880,
+ 'remove_private_data': False,
+ 'local_collection': True,
+ 'ondemand_state': True,
+ 'ondemand_server_url': 'https://support.netapp.com/aods/asupmessage',
+ 'partner_address': ['test@example.com']
+ }],
+ 'num_records': 1
+ }, None)
+})
+
+autosupport_info = {
+ 'attributes': {
+ 'autosupport-config-info': {
+ 'is-enabled': 'true',
+ 'node-name': 'node1',
+ 'transport': 'http',
+ 'post-url': 'support.netapp.com/asupprod/post/1.0/postAsup',
+ 'from': 'Postmaster',
+ 'proxy-url': 'username1:********@host.com:8080',
+ 'retry-count': '16',
+ 'max-http-size': '10485760',
+ 'max-smtp-size': '5242880',
+ 'is-support-enabled': 'true',
+ 'is-node-in-subject': 'true',
+ 'is-nht-data-enabled': 'false',
+ 'is-perf-data-enabled': 'true',
+ 'is-reminder-enabled': 'true',
+ 'is-private-data-removed': 'false',
+ 'is-local-collection-enabled': 'true',
+ 'is-ondemand-enabled': 'true',
+ 'validate-digital-certificate': 'true',
+ }
+ }
+}
+
+ZRR = zapi_responses({
+ 'autosupport_info': build_zapi_response(autosupport_info)
+})
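+# The ZAPI record above mirrors the REST 'one_asup_record' (same transport, URLs,
+# retry count, ...), so both transports are tested against equivalent current state;
+# 'success' and 'error' used below come from the zapi_responses() defaults.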
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ assert 'missing required arguments:' in call_main(my_main, {}, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_ensure_get_called():
+ register_responses([
+ ('ZAPI', 'autosupport-config-get', ZRR['autosupport_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_autosupport_config() is not None
+
+
+def test_successful_modify():
+ ''' modifying asup and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'autosupport-config-get', ZRR['autosupport_info']),
+ ('ZAPI', 'autosupport-config-modify', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'autosupport-config-get', ZRR['autosupport_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'ondemand_enabled': False,
+ 'partner_addresses': [],
+ 'post_url': 'some_url',
+ 'from_address': 'from_add',
+ 'to_addresses': 'to_add',
+ 'hostname_in_subject': False,
+ 'nht_data_enabled': True,
+ 'perf_data_enabled': False,
+ 'reminder_enabled': False,
+ 'private_data_removed': True,
+ 'local_collection_enabled': False,
+ 'retry_count': 3,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # idempotency
+ module_args = {
+ 'use_rest': 'never',
+ 'ondemand_enabled': True,
+ 'partner_addresses': [],
+ 'post_url': 'support.netapp.com/asupprod/post/1.0/postAsup',
+ 'from_address': 'Postmaster',
+ 'hostname_in_subject': True,
+ 'nht_data_enabled': False,
+ 'perf_data_enabled': True,
+ 'reminder_enabled': True,
+ 'private_data_removed': False,
+ 'local_collection_enabled': True,
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ZAPI', 'autosupport-config-get', ZRR['error']),
+ # idempotency
+ ('ZAPI', 'autosupport-config-get', ZRR['autosupport_info']),
+ ('ZAPI', 'autosupport-config-modify', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'ondemand_enabled': False,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == zapi_error_message('Error fetching info')
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == zapi_error_message('Error modifying asup')
+
+
+def test_rest_modify_no_action():
+ ''' modify asup '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/system/node/autosupport', SRR['one_asup_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_modify_prepopulate():
+ ''' modify asup '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/system/node/autosupport', SRR['one_asup_record']),
+ ('PATCH', 'private/cli/system/node/autosupport', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ondemand_enabled': False,
+ 'partner_addresses': [],
+ 'post_url': 'some_url',
+ 'from_address': 'from_add',
+ 'to_addresses': 'to_add',
+ 'hostname_in_subject': False,
+ 'nht_data_enabled': True,
+ 'perf_data_enabled': False,
+ 'reminder_enabled': False,
+ 'private_data_removed': True,
+ 'local_collection_enabled': False,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_modify_password():
+ ''' modify asup '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/system/node/autosupport', SRR['one_asup_record']),
+ ('PATCH', 'private/cli/system/node/autosupport', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ # only the password portion of proxy_url differs; it cannot be compared, so a modify and a warning are expected
+ 'proxy_url': 'username1:password2@host.com:8080'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised('na_ontap_autosupport is not idempotent because the password value in proxy_url cannot be compared.')
+
+
+def test_rest_get_error():
+ ''' error fetching asup info '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/system/node/autosupport', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == rest_error_message('Error fetching info', 'private/cli/system/node/autosupport')
+
+
+def test_rest_modify_error():
+ ''' modify asup '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/system/node/autosupport', SRR['one_asup_record']),
+ ('PATCH', 'private/cli/system/node/autosupport', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ondemand_enabled': False,
+ 'partner_addresses': []
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == rest_error_message('Error modifying asup', 'private/cli/system/node/autosupport')
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_autosupport_invoke.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_autosupport_invoke.py
new file mode 100644
index 000000000..872cffa1b
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_autosupport_invoke.py
@@ -0,0 +1,103 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_autosupport_invoke '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_autosupport_invoke \
+ import NetAppONTAPasupInvoke as invoke_module # module under test
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error")
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
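+ # invoke_successfully always raises, so any ZAPI call routed through this
+ # mock exercises the module's error handling path.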
+ def invoke_successfully(self, xml, enable_tunneling):
+ raise netapp_utils.zapi.NaApiError('test', 'Expected error')
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_autosupport_invoke '''
+
+ def setUp(self):
+ self.mock_invoke = {
+ 'name': 'test_node',
+ 'message': 'test_message',
+ 'type': 'all'
+ }
+
+ def mock_args(self):
+ return {
+ 'message': self.mock_invoke['message'],
+ 'name': self.mock_invoke['name'],
+ 'type': self.mock_invoke['type'],
+ 'hostname': 'test_host',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_invoke_mock_object(self, use_rest=True):
+ invoke_obj = invoke_module()
+ if not use_rest:
+ invoke_obj.ems_log_event = Mock()
+ invoke_obj.server = MockONTAPConnection()
+ return invoke_obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_send(self, mock_request):
+ '''Test successful send message'''
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_invoke_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_send_error(self, mock_request):
+ '''Test rest send error'''
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_invoke_mock_object().apply()
+ msg = "Error on sending autosupport message to node %s: Expected error." % data['name']
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_zapi_send_error(self):
+ '''Test zapi send error'''
+ data = self.mock_args()
+ data['use_rest'] = 'Never'
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_invoke_mock_object(use_rest=False).apply()
+ msg = "Error on sending autosupport message to node %s: NetApp API failed. Reason - test:Expected error." % data['name']
+ assert exc.value.args[0]['msg'] == msg
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_bgp_peer_group.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_bgp_peer_group.py
new file mode 100644
index 000000000..ea13a47fe
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_bgp_peer_group.py
@@ -0,0 +1,211 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, \
+ create_and_apply, create_module, expect_and_capture_ansible_exception, call_main
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, \
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_bgp_peer_group \
+ import NetAppOntapBgpPeerGroup as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'bgpv4peer',
+ 'use_rest': 'always',
+ 'local': {
+ 'interface': {
+ 'name': 'lif1'
+ }
+ },
+ 'peer': {
+ 'address': '10.10.10.7',
+ 'asn': 0
+ }
+}
+
+
+SRR = rest_responses({
+ 'bgp_peer_info': (200, {"records": [
+ {
+ "ipspace": {"name": "exchange"},
+ "local": {
+ "interface": {"ip": {"address": "10.10.10.7"}, "name": "lif1"},
+ "port": {"name": "e1b", "node": {"name": "node1"}}
+ },
+ "name": "bgpv4peer",
+ "peer": {"address": "10.10.10.7", "asn": 0},
+ "state": "up",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }], "num_records": 1}, None),
+ 'bgp_modified': (200, {"records": [
+ {
+ "ipspace": {"name": "exchange"},
+ "local": {
+ "interface": {"ip": {"address": "10.10.10.7"}, "name": "lif1"},
+ "port": {"name": "e1b", "node": {"name": "node1"}}
+ },
+ "name": "bgpv4peer",
+ "peer": {"address": "10.10.10.8", "asn": 0},
+ "state": "up",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }], "num_records": 1}, None),
+ 'bgp_name_modified': (200, {"records": [
+ {
+ "ipspace": {"name": "exchange"},
+ "local": {
+ "interface": {"ip": {"address": "10.10.10.7"}, "name": "lif1"},
+ "port": {"name": "e1b", "node": {"name": "node1"}}
+ },
+ "name": "newbgpv4peer",
+ "peer": {"address": "10.10.10.8", "asn": 0},
+ "state": "up",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }], "num_records": 1}, None),
+ 'bgp_peer_info_ipv6': (200, {"records": [
+ {
+ "ipspace": {"name": "exchange"},
+ "name": "bgpv6peer",
+ "peer": {"address": "2402:940::45", "asn": 0},
+ "state": "up",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }], "num_records": 1}, None),
+ 'bgp_modified_ipv6': (200, {"records": [
+ {
+ "ipspace": {"name": "exchange"},
+ "name": "bgpv6peer",
+ "peer": {"address": "2402:940::46", "asn": 0},
+ "state": "up",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }], "num_records": 1}, None),
+})
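+# The canned records above model the successive GET results consumed by the tests
+# below: the original peer group, the group after the peer-address PATCH, the renamed
+# group, and the IPv6 variants used to check address normalization on idempotency.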
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "name"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_create_bgp_peer_group():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['empty_records']),
+ ('POST', 'network/ip/bgp/peer-groups', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_peer_info'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_modify_bgp_peer_group():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_peer_info']),
+ ('PATCH', 'network/ip/bgp/peer-groups/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_modified']),
+ # ipv6 modify
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_peer_info_ipv6']),
+ ('PATCH', 'network/ip/bgp/peer-groups/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_modified_ipv6'])
+ ])
+ args = {'peer': {'address': '10.10.10.8'}}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ args = {'name': 'bgpv6peer', 'peer': {'address': '2402:0940:000:000:00:00:0000:0046'}}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_rename_modify_bgp_peer_group():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_peer_info']),
+ ('PATCH', 'network/ip/bgp/peer-groups/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_name_modified'])
+ ])
+ args = {'from_name': 'bgpv4peer', 'name': 'newbgpv4peer', 'peer': {'address': '10.10.10.8'}}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_bgp_peer_group():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_peer_info']),
+ ('DELETE', 'network/ip/bgp/peer-groups/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['empty_records'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_all_methods_catch_exception():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # GET/POST/PATCH/DELETE error.
+ ('GET', 'network/ip/bgp/peer-groups', SRR['generic_error']),
+ ('POST', 'network/ip/bgp/peer-groups', SRR['generic_error']),
+ ('PATCH', 'network/ip/bgp/peer-groups/1cd8a442', SRR['generic_error']),
+ ('DELETE', 'network/ip/bgp/peer-groups/1cd8a442', SRR['generic_error'])
+ ])
+ bgp_obj = create_module(my_module, DEFAULT_ARGS)
+ bgp_obj.uuid = '1cd8a442'
+ assert 'Error fetching BGP peer' in expect_and_capture_ansible_exception(bgp_obj.get_bgp_peer_group, 'fail')['msg']
+ assert 'Error creating BGP peer' in expect_and_capture_ansible_exception(bgp_obj.create_bgp_peer_group, 'fail')['msg']
+ assert 'Error modifying BGP peer' in expect_and_capture_ansible_exception(bgp_obj.modify_bgp_peer_group, 'fail', {})['msg']
+ assert 'Error deleting BGP peer' in expect_and_capture_ansible_exception(bgp_obj.delete_bgp_peer_group, 'fail')['msg']
+
+
+def test_modify_rename_create_error():
+ register_responses([
+ # Error if both name and from_name not exist.
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['empty_records']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['empty_records']),
+ # Error if try to modify asn.
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['bgp_peer_info']),
+ # Error if peer and local not present in args when creating peer groups.
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/bgp/peer-groups', SRR['empty_records'])
+ ])
+ assert 'Error renaming BGP peer group' in create_and_apply(my_module, DEFAULT_ARGS, {'from_name': 'name'}, fail=True)['msg']
+ args = {'peer': {'asn': 5}}
+ assert 'Error: cannot modify peer asn.' in create_and_apply(my_module, DEFAULT_ARGS, args, fail=True)['msg']
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['peer']
+ del DEFAULT_ARGS_COPY['local']
+ assert 'Error creating BGP peer group' in create_and_apply(my_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+
+
+def test_error_ontap96():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ assert 'requires ONTAP 9.7.0 or later' in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_broadcast_domain.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_broadcast_domain.py
new file mode 100644
index 000000000..5a38d3933
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_broadcast_domain.py
@@ -0,0 +1,808 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings, set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain \
+ import NetAppOntapBroadcastDomain as broadcast_domain_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.type = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'broadcast_domain':
+ xml = self.build_broadcast_domain_info(self.params)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_broadcast_domain_info(broadcast_domain_details):
+ ''' build xml data for broadcast_domain info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'net-port-broadcast-domain-info': {
+ 'broadcast-domain': broadcast_domain_details['name'],
+ 'ipspace': broadcast_domain_details['ipspace'],
+ 'mtu': broadcast_domain_details['mtu'],
+ 'ports': {
+ 'port-info': {
+ 'port': 'test_port_1'
+ }
+ }
+ }
+
+ }
+ }
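+ # translate_struct converts the nested dict into child XML elements, so
+ # get_broadcast_domain() can parse this like a live ZAPI reply.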
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.mock_broadcast_domain = {
+ 'name': 'test_broadcast_domain',
+ 'mtu': 1000,
+ 'ipspace': 'Default',
+ 'ports': 'test_port_1'
+ }
+
+ def mock_args(self):
+ return {
+ 'name': self.mock_broadcast_domain['name'],
+ 'ipspace': self.mock_broadcast_domain['ipspace'],
+ 'mtu': self.mock_broadcast_domain['mtu'],
+ 'ports': self.mock_broadcast_domain['ports'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never',
+ 'feature_flags': {'no_cserver_ems': True}
+ }
+
+ def get_broadcast_domain_mock_object(self, kind=None, data=None):
+ """
+ Helper method to return an na_ontap_broadcast_domain object
+ :param kind: passes this param to MockONTAPConnection()
+ :param data: passes this param to MockONTAPConnection()
+ :return: na_ontap_broadcast_domain object
+ """
+ broadcast_domain_obj = broadcast_domain_module()
+ broadcast_domain_obj.asup_log_for_cserver = Mock(return_value=None)
+ broadcast_domain_obj.cluster = Mock()
+ broadcast_domain_obj.cluster.invoke_successfully = Mock()
+ if kind is None:
+ broadcast_domain_obj.server = MockONTAPConnection()
+ else:
+ if data is None:
+ broadcast_domain_obj.server = MockONTAPConnection(kind='broadcast_domain', data=self.mock_broadcast_domain)
+ else:
+ broadcast_domain_obj.server = MockONTAPConnection(kind='broadcast_domain', data=data)
+ return broadcast_domain_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ broadcast_domain_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+    def test_get_nonexistent_broadcast_domain(self):
+ ''' Test if get_broadcast_domain returns None for non-existent broadcast_domain '''
+ set_module_args(self.mock_args())
+ result = self.get_broadcast_domain_mock_object().get_broadcast_domain()
+ assert result is None
+
+ def test_create_error_missing_broadcast_domain(self):
+ ''' Test if create throws an error if broadcast_domain is not specified'''
+ data = self.mock_args()
+ del data['name']
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_broadcast_domain_mock_object('broadcast_domain').create_broadcast_domain()
+ msg = 'missing required arguments: name'
+ assert exc.value.args[0]['msg'] == msg
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.create_broadcast_domain')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.get_broadcast_domain')
+ def test_successful_create(self, get_broadcast_domain, create_broadcast_domain):
+ ''' Test successful create '''
+ data = self.mock_args()
+ set_module_args(data)
+ get_broadcast_domain.side_effect = [None]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object().apply()
+ assert exc.value.args[0]['changed']
+ create_broadcast_domain.assert_called_with(None)
+
+ def test_create_idempotency(self):
+ ''' Test create idempotency '''
+ set_module_args(self.mock_args())
+ obj = self.get_broadcast_domain_mock_object('broadcast_domain')
+ with pytest.raises(AnsibleExitJson) as exc:
+ obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.create_broadcast_domain')
+ def test_create_idempotency_identical_ports(self, create_broadcast_domain):
+        ''' Test create idempotency with identical ports '''
+ data = self.mock_args()
+ data['ports'] = ['test_port_1', 'test_port_1']
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object('broadcast_domain').apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_modify_mtu(self):
+ ''' Test successful modify mtu '''
+ data = self.mock_args()
+ data['mtu'] = 1200
+ data['from_ipspace'] = 'test'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object('broadcast_domain').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_modify_ipspace_idempotency(self):
+        ''' Test modify ipspace idempotency '''
+ data = self.mock_args()
+ data['ipspace'] = 'Default'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object('broadcast_domain').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.add_broadcast_domain_ports')
+ def test_add_ports(self, add_broadcast_domain_ports):
+        ''' Test successfully adding ports '''
+ data = self.mock_args()
+ data['ports'] = 'test_port_1,test_port_2'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object('broadcast_domain').apply()
+ assert exc.value.args[0]['changed']
+ add_broadcast_domain_ports.assert_called_with(['test_port_2'])
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.delete_broadcast_domain_ports')
+ def test_delete_ports(self, delete_broadcast_domain_ports):
+        ''' Test successfully removing ports '''
+ data = self.mock_args()
+ data['ports'] = ''
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object('broadcast_domain').apply()
+ assert exc.value.args[0]['changed']
+ delete_broadcast_domain_ports.assert_called_with(['test_port_1'])
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.modify_broadcast_domain')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.split_broadcast_domain')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.get_broadcast_domain')
+ def test_split_broadcast_domain(self, get_broadcast_domain, split_broadcast_domain, modify_broadcast_domain):
+ ''' Test successful split broadcast domain '''
+ data = self.mock_args()
+ data['from_name'] = 'test_broadcast_domain'
+ data['name'] = 'test_broadcast_domain_2'
+ data['ports'] = 'test_port_2'
+ set_module_args(data)
+ current = {
+ 'domain-name': 'test_broadcast_domain',
+ 'mtu': 1000,
+ 'ipspace': 'Default',
+ 'ports': ['test_port_1,test_port2']
+ }
+ get_broadcast_domain.side_effect = [
+ None,
+ current
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object().apply()
+ assert exc.value.args[0]['changed']
+ modify_broadcast_domain.assert_not_called()
+ split_broadcast_domain.assert_called_with()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.delete_broadcast_domain')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.modify_broadcast_domain')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.get_broadcast_domain')
+ def test_split_broadcast_domain_modify_delete(self, get_broadcast_domain, modify_broadcast_domain, delete_broadcast_domain):
+        ''' Test split broadcast domain with delete of the original domain and modify of the new one '''
+ data = self.mock_args()
+ data['from_name'] = 'test_broadcast_domain'
+ data['name'] = 'test_broadcast_domain_2'
+ data['ports'] = ['test_port_1', 'test_port_2']
+ data['mtu'] = 1200
+ set_module_args(data)
+ current = {
+ 'name': 'test_broadcast_domain',
+ 'mtu': 1000,
+ 'ipspace': 'Default',
+ 'ports': ['test_port_1', 'test_port2']
+ }
+ get_broadcast_domain.side_effect = [
+ None,
+ current
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object().apply()
+ assert exc.value.args[0]['changed']
+ delete_broadcast_domain.assert_called_with('test_broadcast_domain')
+ modify_broadcast_domain.assert_called_with()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.get_broadcast_domain')
+ def test_split_broadcast_domain_not_exist(self, get_broadcast_domain):
+ ''' Test split broadcast domain does not exist '''
+ data = self.mock_args()
+ data['from_name'] = 'test_broadcast_domain'
+ data['name'] = 'test_broadcast_domain_2'
+ data['ports'] = 'test_port_2'
+ set_module_args(data)
+
+ get_broadcast_domain.side_effect = [
+ None,
+ None,
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_broadcast_domain_mock_object().apply()
+ msg = 'A domain cannot be split if it does not exist.'
+        assert exc.value.args[0]['msg'] == msg
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.split_broadcast_domain')
+ def test_split_broadcast_domain_idempotency(self, split_broadcast_domain):
+        ''' Test split broadcast domain idempotency '''
+ data = self.mock_args()
+ data['from_name'] = 'test_broadcast_domain'
+ data['ports'] = 'test_port_1'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object('broadcast_domain').apply()
+ assert not exc.value.args[0]['changed']
+ split_broadcast_domain.assert_not_called()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.delete_broadcast_domain')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.get_broadcast_domain')
+ def test_delete_broadcast_domain(self, get_broadcast_domain, delete_broadcast_domain):
+ ''' test delete broadcast domain '''
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ current = {
+ 'name': 'test_broadcast_domain',
+ 'mtu': 1000,
+ 'ipspace': 'Default',
+ 'ports': ['test_port_1', 'test_port2']
+ }
+ get_broadcast_domain.side_effect = [current]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object().apply()
+ assert exc.value.args[0]['changed']
+ delete_broadcast_domain.assert_called_with(current=current)
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.delete_broadcast_domain')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.get_broadcast_domain')
+ def test_delete_broadcast_domain_idempotent(self, get_broadcast_domain, delete_broadcast_domain):
+        ''' test delete broadcast domain idempotency '''
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ get_broadcast_domain.side_effect = [None]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+ delete_broadcast_domain.assert_not_called()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.delete_broadcast_domain')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_broadcast_domain.NetAppOntapBroadcastDomain.get_broadcast_domain')
+ def test_delete_broadcast_domain_if_all_ports_are_removed(self, get_broadcast_domain, delete_broadcast_domain):
+ ''' test delete broadcast domain if all the ports are deleted '''
+ data = self.mock_args()
+ data['ports'] = []
+ data['state'] = 'present'
+ set_module_args(data)
+ current = {
+ 'name': 'test_broadcast_domain',
+ 'mtu': 1000,
+ 'ipspace': 'Default',
+ 'ports': ['test_port_1', 'test_port2']
+ }
+ get_broadcast_domain.side_effect = [current]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_broadcast_domain_mock_object().apply()
+ assert exc.value.args[0]['changed']
+ delete_broadcast_domain.assert_called_with(current=current)
+
+
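+# The module-level tests below exercise the REST code path ('use_rest': 'always'),
+# patching OntapRestAPI.send_request and replaying the canned responses defined in SRR.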
+def default_args():
+ args = {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
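+# Each entry is a (status_code, response_body, error) tuple; tests feed these to
+# mock_request.side_effect in the order the module is expected to issue its requests.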
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_6': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_9_7': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'port_detail_e0d': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ 'name': 'e0d',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': 'ea670505-2ab3-11ec-aa30-005056b3dfc8'
+ }]
+ }, None),
+ 'port_detail_e0a': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ 'name': 'e0a',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': 'ea63420b-2ab3-11ec-aa30-005056b3dfc8'
+ }]
+ }, None),
+ 'port_detail_e0b': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ 'name': 'e0b',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': 'ea64c0f2-2ab3-11ec-aa30-005056b3dfc8'
+ }]
+ }, None),
+ 'broadcast_domain_record': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ "uuid": "4475a2c8-f8a0-11e8-8d33-005056bb986f",
+ "name": "domain1",
+ "ipspace": {"name": "ip1"},
+ "ports": [
+ {
+ "uuid": "ea63420b-2ab3-11ec-aa30-005056b3dfc8",
+ "name": "e0a",
+ "node": {
+ "name": "mohan9cluster2-01"
+ }
+ },
+ {
+ "uuid": "ea64c0f2-2ab3-11ec-aa30-005056b3dfc8",
+ "name": "e0b",
+ "node": {
+ "name": "mohan9cluster2-01"
+ }
+ },
+ {
+ "uuid": "ea670505-2ab3-11ec-aa30-005056b3dfc8",
+ "name": "e0d",
+ "node": {
+ "name": "mohan9cluster2-01"
+ }
+ }
+ ],
+ "mtu": 9000
+ }]
+ }, None),
+ 'broadcast_domain_record_split': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ "uuid": "4475a2c8-f8a0-11e8-8d33-005056bb986f",
+ "name": "domain2",
+ "ipspace": {"name": "ip1"},
+ "ports": [
+ {
+ "uuid": "ea63420b-2ab3-11ec-aa30-005056b3dfc8",
+ "name": "e0a",
+ "node": {
+ "name": "mohan9cluster2-01"
+ }
+ }
+ ],
+ "mtu": 9000
+ }]
+ }, None)
+}
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ broadcast_domain_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments:'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_create_broadcast_domain(mock_request, patch_ansible):
+ ''' test create broadcast domain '''
+ args = dict(default_args())
+ args['name'] = "domain1"
+ args['ipspace'] = "ip1"
+ args['mtu'] = "9000"
+ args['ports'] = ["mohan9cluster2-01:e0a", "mohan9cluster2-01:e0b", "mohan9cluster2-01:e0d"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0a'],
+ SRR['port_detail_e0b'],
+ SRR['port_detail_e0d'],
+ SRR['zero_record'], # get
+ SRR['empty_good'], # create
+ SRR['empty_good'], # add e0a
+ SRR['empty_good'], # add e0b
+        SRR['empty_good'],  # add e0d
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_create_broadcast_domain_idempotency(mock_request, patch_ansible):
+    ''' test create broadcast domain idempotency '''
+ args = dict(default_args())
+ args['name'] = "domain1"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 9000
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['broadcast_domain_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_create_broadcast_domain_idempotency_identical_ports(mock_request, patch_ansible):
+    ''' test create broadcast domain idempotency with identical ports '''
+ args = dict(default_args())
+ args['name'] = "domain2"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 9000
+ args['ports'] = ['mohan9cluster2-01:e0a', 'mohan9cluster2-01:e0a']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0a'],
+ SRR['broadcast_domain_record_split'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_modify_broadcast_domain(mock_request, patch_ansible):
+ ''' test modify broadcast domain mtu '''
+ args = dict(default_args())
+ args['name'] = "domain1"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 1500
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['broadcast_domain_record'], # get
+ SRR['empty_good'], # modify
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rename_broadcast_domain(mock_request, patch_ansible):
+    ''' test rename broadcast domain '''
+ args = dict(default_args())
+ args['from_name'] = "domain1"
+ args['name'] = "domain2"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 1500
+ args['ports'] = ["mohan9cluster2-01:e0a", "mohan9cluster2-01:e0b", "mohan9cluster2-01:e0d"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0a'],
+ SRR['port_detail_e0b'],
+ SRR['port_detail_e0d'],
+ SRR['zero_record'], # get
+ SRR['broadcast_domain_record'], # get
+ SRR['empty_good'], # rename broadcast domain
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_split_broadcast_domain_create_domain2_with_e0a(mock_request, patch_ansible):
+    ''' test split broadcast domain: create domain2 with port e0a '''
+ args = dict(default_args())
+ args['from_name'] = "domain1"
+ args['name'] = "domain2"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 1500
+ args['ports'] = ["mohan9cluster2-01:e0a"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0a'],
+ SRR['zero_record'], # get
+ SRR['broadcast_domain_record'], # get
+ SRR['empty_good'], # create broadcast domain
+ SRR['empty_good'], # add e0a to domain2
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_split_broadcast_domain_create_domain2_with_e0a_idempotent(mock_request, patch_ansible):
+    ''' test split broadcast domain idempotency '''
+ args = dict(default_args())
+ args['from_name'] = "domain1"
+ args['name'] = "domain2"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 1500
+ args['ports'] = ["mohan9cluster2-01:e0a"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0a'],
+ SRR['broadcast_domain_record_split'], # get domain2 details
+ SRR['zero_record'], # empty record for domain1
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_create_new_broadcast_domain_with_partial_match(mock_request, patch_ansible):
+    ''' test create new broadcast domain when only a partial port match is found '''
+ args = dict(default_args())
+ args['from_name'] = "domain2"
+ args['name'] = "domain1"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 1500
+ args['ports'] = ["mohan9cluster2-01:e0b"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['zero_record'], # empty record for domain1
+ SRR['broadcast_domain_record_split'], # get domain2 details
+ SRR['empty_good'], # create broadcast domain domain1
+ SRR['empty_good'], # add e0b to domain1
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_delete_broadcast_domain(mock_request, patch_ansible):
+    ''' test delete broadcast domain '''
+ args = dict(default_args())
+ args['name'] = "domain1"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 1500
+ args['state'] = "absent"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['broadcast_domain_record'], # get
+ SRR['empty_good'], # remove all the ports in broadcast domain
+ SRR['empty_good'], # delete broadcast domain
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_try_to_bad_format_port(mock_request, patch_ansible):
+    ''' test error on badly formatted port name '''
+ args = dict(default_args())
+ args['name'] = "domain1"
+ args['ipspace'] = "ip1"
+ args['mtu'] = 1500
+ args['state'] = "present"
+ args['ports'] = ["mohan9cluster2-01e0a"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+        broadcast_domain_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: Invalid value specified for port: mohan9cluster2-01e0a, provide port name as node_name:port_name"
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_try_to_create_domain_without_ipspace(mock_request, patch_ansible):
+    ''' test error when creating a broadcast domain without ipspace '''
+ args = dict(default_args())
+ args['name'] = "domain1"
+ args['mtu'] = 1500
+ args['state'] = "present"
+ args['ports'] = ["mohan9cluster2-01:e0a"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+        broadcast_domain_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: ipspace space is a required option with REST"
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_modify_ipspace(mock_request, patch_ansible):
+ ''' test modify ipspace '''
+ args = dict(default_args())
+ args['name'] = "domain2"
+ args['from_ipspace'] = "ip1"
+ args['ipspace'] = "Default"
+ args['mtu'] = 1500
+ args['ports'] = ["mohan9cluster2-01:e0b"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['zero_record'], # empty record for domain2 in ipspace Default
+ SRR['broadcast_domain_record_split'], # get domain2 details in ipspace ip1
+ SRR['empty_good'], # modify ipspace
+ SRR['empty_good'], # add e0b to domain2
+ SRR['empty_good'], # remove e0a
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_modify_name_and_ipspace(mock_request, patch_ansible):
+    ''' test modify name and ipspace '''
+ args = dict(default_args())
+ args['from_name'] = "domain2"
+ args['name'] = "domain1"
+ args['from_ipspace'] = "ip1"
+ args['ipspace'] = "Default"
+ args['mtu'] = 1500
+ args['ports'] = ["mohan9cluster2-01:e0a"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0a'],
+ SRR['zero_record'], # empty record for domain2 in ipspace Default
+ SRR['broadcast_domain_record_split'], # get domain2 details in ipspace ip1
+ SRR['empty_good'], # modify name, ipspace and mtu
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_split_name_ipspace_if_not_exact_match_of_ports(mock_request, patch_ansible):
+    ''' test create new domain when an exact port match is not found '''
+ args = dict(default_args())
+ args['from_name'] = "domain2"
+ args['name'] = "domain1"
+ args['from_ipspace'] = "ip1"
+ args['ipspace'] = "Default"
+ args['mtu'] = 1500
+ args['ports'] = ["mohan9cluster2-01:e0b"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['zero_record'], # empty record for domain1 in ipspace Default
+ SRR['broadcast_domain_record_split'], # get domain2 details in ipspace ip1
+ SRR['empty_good'], # create new broadcast domain domain1 in ipspace Default
+ SRR['empty_good'], # Add e0b to domain1
+ SRR['end_of_sequence']
+ ]
+ my_obj = broadcast_domain_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cg_snapshot.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cg_snapshot.py
new file mode 100644
index 000000000..78c35ba73
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cg_snapshot.py
@@ -0,0 +1,81 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_cg_snapshot'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cg_snapshot \
+ import NetAppONTAPCGSnapshot as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, parm1=None):
+ ''' save arguments '''
+ self.type = kind
+ self.parm1 = parm1
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'vserver':
+ xml = self.build_vserver_info(self.parm1)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_vserver_info(vserver):
+        ''' build xml data for vserver-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = netapp_utils.zapi.NaElement('attributes-list')
+ attributes.add_node_with_children('vserver-info',
+ **{'vserver-name': vserver})
+ xml.add_child_elem(attributes)
+ # print(xml.to_string())
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_command_called(self):
+        ''' test that cgcreate fails when no CG ID is returned '''
+ set_module_args({
+ 'vserver': 'vserver',
+ 'volumes': 'volumes',
+ 'snapshot': 'snapshot',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ })
+ my_obj = my_module()
+ my_obj.server = self.server
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.cgcreate()
+ msg = 'Error fetching CG ID for CG commit snapshot'
+ assert exc.value.args[0]['msg'] == msg
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs.py
new file mode 100644
index 000000000..99aa0d140
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs.py
@@ -0,0 +1,464 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_cifs '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ patch_ansible, call_main, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs \
+ import NetAppONTAPCifsShare as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'cifs_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "name": 'cifs_share_name',
+ "path": '/',
+ "comment": 'CIFS share comment',
+ "unix_symlink": 'widelink',
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ },
+ "access_based_enumeration": True,
+ "change_notify": True,
+ "encryption": False,
+ "home_directory": True,
+ "oplocks": False,
+ "continuously_available": True,
+ "show_snapshot": True,
+ "namespace_caching": True,
+ "allow_unencrypted_access": True,
+ "browsable": True,
+ "show_previous_versions": True
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
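+# rest_responses() is expected to merge these module-specific entries with shared defaults
+# (e.g. 'is_rest', 'empty_records', 'empty_good', 'generic_error') referenced in the REST tests below.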
+
+cifs_record_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'cifs-share': {
+ 'share-name': 'cifs_share_name',
+ 'path': '/test',
+ 'vscan-fileop-profile': 'standard',
+ 'share-properties': [{'cifs-share-properties': 'browsable'}, {'cifs-share-properties': 'show_previous_versions'}],
+ 'symlink-properties': [{'cifs-share-symlink-properties': 'enable'}]
+ }
+ }
+}
+
+ZRR = zapi_responses({
+ 'cifs_record_info': build_zapi_response(cifs_record_info)
+})
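+# zapi_responses() likewise provides shared defaults such as 'empty', 'success' and 'error'.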
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'admin',
+ 'password': 'netapp1!',
+ 'name': 'cifs_share_name',
+ 'path': '/test',
+ 'share_properties': ['browsable', 'show-previous-versions'],
+ 'symlink_properties': 'enable',
+ 'vscan_fileop_profile': 'standard',
+ 'vserver': 'abc',
+ 'use_rest': 'never'
+}
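+# DEFAULT_ARGS drives the ZAPI tests; per-test module_args are layered on top by
+# create_module, create_and_apply and call_main.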
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ error = 'missing required arguments:'
+ assert error in call_main(my_main, {}, fail=True)['msg']
+
+
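+# register_responses() queues the expected (ZAPI call, canned response) pairs; the mock
+# framework replays them in order as the module under test invokes each API.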
+def test_get():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info'])
+ ])
+ cifs_obj = create_module(my_module, DEFAULT_ARGS)
+ result = cifs_obj.get_cifs_share()
+ assert result
+
+
+def test_error_create():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['empty']),
+ ('cifs-share-create', ZRR['error']),
+ ])
+ module_args = {
+ 'state': 'present'
+ }
+    error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Error creating cifs-share' in error
+
+
+def test_create():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['empty']),
+ ('cifs-share-create', ZRR['success']),
+ ])
+ module_args = {
+ 'state': 'present',
+ 'comment': 'some_comment'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info']),
+ ('cifs-share-delete', ZRR['success']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_delete():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info']),
+ ('cifs-share-delete', ZRR['error']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Error deleting cifs-share' in error
+
+
+def test_modify_path():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info']),
+ ('cifs-share-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'path': '//'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_comment():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info']),
+ ('cifs-share-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'comment': 'cifs modify'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_share_properties():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info']),
+ ('cifs-share-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'share_properties': 'oplocks'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_symlink_properties():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info']),
+ ('cifs-share-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'symlink_properties': 'read_only'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_vscan_fileop_profile():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info']),
+ ('cifs-share-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'vscan_fileop_profile': 'strict'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info']),
+ ('cifs-share-modify', ZRR['error']),
+ ])
+ module_args = {
+ 'symlink_properties': 'read'
+ }
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Error modifying cifs-share' in error
+
+
+def test_create_idempotency():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['cifs_record_info'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed'] is False
+
+
+def test_delete_idempotency():
+ register_responses([
+ ('cifs-share-get-iter', ZRR['empty'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed'] is False
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('cifs-share-create', ZRR['error']),
+ ('cifs-share-modify', ZRR['error']),
+ ('cifs-share-delete', ZRR['error'])
+ ])
+ module_args = {}
+
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.create_cifs_share, 'fail')['msg']
+ assert 'Error creating cifs-share cifs_share_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.modify_cifs_share, 'fail')['msg']
+ assert 'Error modifying cifs-share cifs_share_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_cifs_share, 'fail')['msg']
+ assert 'Error deleting cifs-share cifs_share_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'test_vserver',
+ 'name': 'cifs_share_name',
+ 'path': '/',
+ 'unix_symlink': 'widelink',
+}
+
+
+def test_options_support():
+ ''' test option support '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ])
+ module_args = {
+ 'show_snapshot': True,
+ 'allow_unencrypted_access': True,
+ 'browsable': True
+ }
+ error = 'Error: Minimum version of ONTAP'
+ assert error in create_module(my_module, ARGS_REST, module_args, fail=True)['msg']
+
+
+def test_rest_successful_create():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['empty_records']),
+ ('POST', 'protocols/cifs/shares', SRR['empty_good']),
+ ])
+ module_args = {
+ 'comment': 'CIFS share comment',
+ 'unix_symlink': 'disable'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_delete_rest():
+ ''' Test delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('DELETE', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_rest_error_get():
+ '''Test error rest get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on fetching cifs shares: calling: protocols/cifs/shares: got Expected error.' in error
+
+
+def test_rest_error_create():
+ '''Test error rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['empty_records']),
+ ('POST', 'protocols/cifs/shares', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on creating cifs shares:' in error
+
+
+def test_error_delete_rest():
+ ''' Test error delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('DELETE', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on deleting cifs shares:' in error
+
+
+def test_modify_cifs_share_path():
+ ''' test modify CIFS share path '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/cifs_share_name', SRR['empty_good']),
+ ])
+ module_args = {
+ 'path': "\\vol1"
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_modify_cifs_share_comment():
+ ''' test modify CIFS share comment '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/cifs_share_name', SRR['empty_good']),
+ ])
+ module_args = {
+ 'comment': "cifs comment modify"
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_modify_cifs_share_properties():
+ ''' test modify CIFS share properties '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/cifs_share_name', SRR['empty_good']),
+ ])
+ module_args = {
+ 'unix_symlink': "disable"
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_modify_cifs_share_properties_2():
+ ''' test modify CIFS share properties '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_13_1']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/cifs_share_name', SRR['empty_good']),
+ ])
+ module_args = {
+ "access_based_enumeration": False,
+ "change_notify": False,
+ "encryption": True,
+ "oplocks": True,
+ "continuously_available": False,
+ "show_snapshot": False,
+ "namespace_caching": False,
+ "allow_unencrypted_access": False,
+ "browsable": False,
+ "show_previous_versions": False
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_error_modify_cifs_share_path():
+ ''' test modify CIFS share path error'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/cifs_share_name', SRR['generic_error']),
+ ])
+ module_args = {
+ 'path': "\\vol1"
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on modifying cifs shares:' in error
+
+
+def test_error_modify_cifs_share_comment():
+ ''' test modify CIFS share comment error'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/cifs_share_name', SRR['generic_error']),
+ ])
+ module_args = {
+ 'comment': "cifs comment modify"
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on modifying cifs shares:' in error
+
+
+def test_rest_successful_create_idempotency():
+    '''Test successful rest create idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record'])
+ ])
+ module_args = {
+ 'use_rest': 'always'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed'] is False
+
+
+def test_rest_successful_delete_idempotency():
+    '''Test successful rest delete idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['empty_records'])
+ ])
+ module_args = {'use_rest': 'always', 'state': 'absent'}
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed'] is False
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_missing_netapp_lib(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg == call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_acl.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_acl.py
new file mode 100644
index 000000000..1d0d565cd
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_acl.py
@@ -0,0 +1,412 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_cifs_acl """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_acl \
+ import NetAppONTAPCifsAcl as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+SHARE_NAME = 'share_name'
+
+acl_info = {'num-records': 1,
+ 'attributes-list':
+ {'cifs-share-access-control':
+ {'share': SHARE_NAME,
+ 'user-or-group': 'user123',
+ 'permission': 'full_control',
+ 'user-group-type': 'windows'
+ }
+ },
+ }
+
+ZRR = zapi_responses({
+ 'acl_info': build_zapi_response(acl_info),
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'permission': 'full_control',
+ 'share_name': 'share_name',
+ 'user_or_group': 'user_or_group',
+ 'vserver': 'vserver',
+ 'use_rest': 'never',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ error_msg = create_module(my_module, fail=True)['msg']
+ for fragment in 'missing required arguments:', 'hostname', 'share_name', 'user_or_group', 'vserver':
+ assert fragment in error_msg
+ assert 'permission' not in error_msg
+
+ args = dict(DEFAULT_ARGS)
+ args.pop('permission')
+ msg = 'state is present but all of the following are missing: permission'
+ assert create_module(my_module, args, fail=True)['msg'] == msg
+
+
+def test_create():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['empty']),
+ ('cifs-share-access-control-create', ZRR['success']),
+ ])
+ module_args = {
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_with_type():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['empty']),
+ ('cifs-share-access-control-create', ZRR['success']),
+ ])
+ module_args = {
+ 'type': 'unix_group'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['acl_info']),
+ ('cifs-share-access-control-delete', ZRR['success']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_idempotent():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['empty']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['acl_info']),
+ ('cifs-share-access-control-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'permission': 'no_access',
+ 'type': 'windows'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_modify_idempotent():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['acl_info']),
+ ])
+ module_args = {
+ 'permission': 'full_control',
+ 'type': 'windows'
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_modify_with_type():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['acl_info']),
+ ])
+ module_args = {
+ 'type': 'unix_group'
+ }
+ msg = 'Error: changing the type is not supported by ONTAP - current: windows, desired: unix_group'
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_negative_modify_with_extra_stuff():
+ register_responses([
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ current = {'share_name': 'extra'}
+ msg = "Error: only permission can be changed - modify: {'share_name': 'share_name'}"
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_modify, 'fail', current)['msg']
+
+ current = {'share_name': 'extra', 'permission': 'permission'}
+ # don't check dict contents as order may differ
+ msg = "Error: only permission can be changed - modify:"
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_modify, 'fail', current)['msg']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['error']),
+ ('cifs-share-access-control-create', ZRR['error']),
+ ('cifs-share-access-control-modify', ZRR['error']),
+ ('cifs-share-access-control-delete', ZRR['error']),
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+
+ msg = 'Error getting cifs-share-access-control share_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_cifs_acl, 'fail')['msg']
+
+ msg = 'Error creating cifs-share-access-control share_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.create_cifs_acl, 'fail')['msg']
+
+ msg = 'Error modifying cifs-share-access-control permission share_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.modify_cifs_acl_permission, 'fail')['msg']
+
+ msg = 'Error deleting cifs-share-access-control share_name: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.delete_cifs_acl, 'fail')['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_missing_netapp_lib(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg in create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_main():
+ register_responses([
+ ('cifs-share-access-control-get-iter', ZRR['empty']),
+ ('cifs-share-access-control-create', ZRR['success']),
+ ])
+ set_module_args(DEFAULT_ARGS)
+ assert expect_and_capture_ansible_exception(my_main, 'exit')['changed']
+
+
+SRR = rest_responses({
+ 'acl_record': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "share": "share_name",
+ "user_or_group": "Everyone",
+ "permission": "full_control",
+ "type": "windows"
+ }
+ ], "num_records": 1}, None),
+ 'cifs_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "name": 'share_name',
+ "path": '/',
+ "comment": 'CIFS share comment',
+ "unix_symlink": 'widelink',
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None
+ )
+})
+
+ARGS_REST = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'permission': 'full_control',
+ 'share_name': 'share_name',
+ 'user_or_group': 'Everyone',
+ 'vserver': 'vserver',
+ 'type': 'windows',
+ 'use_rest': 'always',
+}
+
+
+def test_error_get_acl_rest():
+ ''' Test get error with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on fetching cifs shares acl:' in error
+
+
+def test_error_get_share_rest():
+    ''' Test get share error with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on fetching cifs shares:' in error
+
+
+def test_error_get_no_share_rest():
+    ''' Test error when the cifs share does not exist with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['empty_records']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error: the cifs share does not exist:' in error
+
+
+def test_create_rest():
+ ''' Test create with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['empty_records']),
+ ('POST', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['empty_good']),
+ ])
+ assert create_and_apply(my_module, ARGS_REST)
+
+
+def test_delete_rest():
+ ''' Test delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['acl_record']),
+ ('DELETE', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls/Everyone/windows', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_create_error_rest():
+ ''' Test create error with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['empty_records']),
+ ('POST', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on creating cifs share acl:' in error
+
+
+def test_error_delete_rest():
+ ''' Test delete error with rest API '''
+ module_args = {
+ 'state': 'absent'
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['acl_record']),
+ ('DELETE', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls/Everyone/windows', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on deleting cifs share acl:' in error
+
+
+def test_modify_rest():
+ ''' Test modify with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['acl_record']),
+ ('PATCH', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls/Everyone/windows', SRR['empty_good']),
+ ])
+ module_args = {
+ 'permission': 'no_access'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_error_modify_rest():
+ ''' Test modify error with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['acl_record']),
+ ('PATCH', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls/Everyone/windows', SRR['generic_error'])
+ ])
+ module_args = {'permission': 'no_access'}
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = 'Error modifying cifs share ACL permission: '\
+ 'calling: protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls/Everyone/windows: got Expected error.'
+ assert msg == error
+
+
+def test_error_get_modify_rest():
+    ''' Test error when attempting to change the type with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['acl_record']),
+ ])
+ module_args = {
+ 'type': 'unix_group'
+ }
+ msg = 'Error: changing the type is not supported by ONTAP - current: windows, desired: unix_group'
+ assert create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg'] == msg
+
+
+def test_negative_modify_with_extra_stuff_rest():
+    ''' Test error when modifying anything other than permission with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest'])
+ ])
+ my_module_object = create_module(my_module, ARGS_REST)
+ current = {'share_name': 'extra'}
+ msg = "Error: only permission can be changed - modify: {'share_name': 'share_name'}"
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_modify, 'fail', current)['msg']
+
+ current = {'share_name': 'extra', 'permission': 'permission'}
+ # don't check dict contents as order may differ
+ msg = "Error: only permission can be changed - modify:"
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_modify, 'fail', current)['msg']
+
+
+def test_delete_idempotent_rest():
+ ''' Test delete idempotency with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['empty_records']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_create_modify_idempotent_rest():
+ ''' Test create and modify idempotency with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/shares', SRR['cifs_record']),
+ ('GET', 'protocols/cifs/shares/671aa46e-11ad-11ec-a267-005056b30cfa/share_name/acls', SRR['acl_record']),
+ ])
+ module_args = {
+ 'permission': 'full_control',
+ 'type': 'windows'
+ }
+ assert not create_and_apply(my_module, ARGS_REST, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_group.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_group.py
new file mode 100644
index 000000000..afe73d191
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_group.py
@@ -0,0 +1,218 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_cifs_local_group '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, call_main, create_module, expect_and_capture_ansible_exception, AnsibleFailJson, create_and_apply
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_local_group \
+ import NetAppOntapCifsLocalGroup as group_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
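+# each entry maps a name to a (status_code, json_body, error) tuple served by the mocked send_request;
+# rest_responses() also supplies shared defaults such as 'is_rest*', 'empty_records', 'empty_good' and 'generic_error'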
+SRR = rest_responses({
+ # module specific responses
+ 'group_record': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansible"
+ },
+ 'name': 'BUILTIN\\Guests',
+ 'sid': 'S-1-5-21-256008430-3394229847-3930036330-1001',
+ }
+ ], "num_records": 1}, None),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
+
+
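+# minimal connection arguments shared by all REST tests below; individual tests add or override options via module_args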
+ARGS_REST = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS',
+}
+
+
+def test_get_existent_cifs_local_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ])
+ cifs_obj = create_module(group_module, ARGS_REST)
+ result = cifs_obj.get_cifs_local_group_rest()
+ assert result
+
+
+def test_error_get_existent_cifs_local_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['generic_error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS',
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = 'Error on fetching cifs local-group:'
+ assert msg in error
+
+
+def test_create_cifs_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['empty_records']),
+ ('POST', 'protocols/cifs/local-groups', SRR['empty_good']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_create_cifs_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['empty_records']),
+ ('POST', 'protocols/cifs/local-groups', SRR['generic_error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS',
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on creating cifs local-group:"
+ assert msg in error
+
+
+def test_delete_cifs_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('DELETE', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001', SRR['empty_good']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_delete_cifs_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('DELETE', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001', SRR['generic_error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS',
+ 'state': 'absent'
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on deleting cifs local-group:"
+ assert msg in error
+
+
+def test_modify_cifs_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('PATCH', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001', SRR['empty_good']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS',
+        'description': 'This is a local group'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_modify_cifs_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('PATCH', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001', SRR['generic_error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS',
+        'description': 'This is a local group'
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on modifying cifs local-group:"
+ assert msg in error
+
+
+def test_rename_cifs_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('PATCH', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001', SRR['empty_good']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'from_name': 'BUILTIN\\GUESTS',
+ 'name': 'ANSIBLE_CIFS\\test_users'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_rest_rename_cifs_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/cifs/local-groups', SRR['empty_records']),
+ ('GET', 'protocols/cifs/local-groups', SRR['empty_records']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'from_name': 'BUILTIN\\GUESTS_user',
+ 'name': 'ANSIBLE_CIFS\\test_users'
+ }
+ error = create_and_apply(group_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error renaming cifs local group:' in error
+
+
+def test_successfully_create_group_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'name': 'BUILTIN\\GUESTS',
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_successfully_destroy_group_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['empty_records']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_group_member.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_group_member.py
new file mode 100644
index 000000000..8afd0c56a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_group_member.py
@@ -0,0 +1,338 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_cifs_local_group_member '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, call_main, create_module, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_local_group_member \
+ import NetAppOntapCifsLocalGroupMember as group_member_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'group_member_record': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "vserver"
+ },
+ 'group_name': 'BUILTIN\\Guests',
+ 'member': 'test',
+ 'sid': 'S-1-5-21-256008430-3394229847-3930036330-1001',
+ }
+ ], "num_records": 1}, None),
+ 'group_record': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "vserver"
+ },
+ 'group_name': 'BUILTIN\\Guests',
+ 'sid': 'S-1-5-21-256008430-3394229847-3930036330-1001',
+ }
+ ], "num_records": 1}, None),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
+
+group_member_info = {'num-records': 1,
+ 'attributes-list':
+ {'cifs-local-group-members':
+ {'group-name': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ 'vserver': 'ansible'
+ }
+ },
+ }
+
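+# wrap the ZAPI-style dict above as the canned reply for 'cifs-local-group-members-get-iter'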
+ZRR = zapi_responses({
+ 'group_member_info': build_zapi_response(group_member_info)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ 'use_rest': 'never',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ group_member_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_get_nonexistent_cifs_group_member():
+ register_responses([
+ ('cifs-local-group-members-get-iter', ZRR['empty'])
+ ])
+ cifs_obj = create_module(group_member_module, DEFAULT_ARGS)
+ result = cifs_obj.get_cifs_local_group_member()
+ assert result is None
+
+
+def test_get_existent_cifs_group_member():
+ register_responses([
+ ('cifs-local-group-members-get-iter', ZRR['group_member_info'])
+ ])
+ cifs_obj = create_module(group_member_module, DEFAULT_ARGS)
+ result = cifs_obj.get_cifs_local_group_member()
+ assert result
+
+
+def test_successfully_add_members_zapi():
+ register_responses([
+ ('cifs-local-group-members-get-iter', ZRR['empty']),
+ ('cifs-local-group-members-add-members', ZRR['success']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_add_members_zapi():
+ register_responses([
+ ('cifs-local-group-members-get-iter', ZRR['empty']),
+ ('cifs-local-group-members-add-members', ZRR['error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error adding member"
+ assert msg in error
+
+
+def test_successfully_remove_members_zapi():
+ register_responses([
+ ('cifs-local-group-members-get-iter', ZRR['group_member_info']),
+ ('cifs-local-group-members-remove-members', ZRR['success']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_remove_members_zapi():
+ register_responses([
+ ('cifs-local-group-members-get-iter', ZRR['group_member_info']),
+ ('cifs-local-group-members-remove-members', ZRR['error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ 'state': 'absent'
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error removing member"
+ assert msg in error
+
+
+def test_successfully_add_members_zapi_idempotency():
+ register_responses([
+ ('cifs-local-group-members-get-iter', ZRR['group_member_info']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_remove_members_zapi_idempotency():
+ register_responses([
+ ('cifs-local-group-members-get-iter', ZRR['empty']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
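+# REST counterpart of DEFAULT_ARGS; 'use_rest': 'always' forces the REST code path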
+ARGS_REST = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ 'use_rest': 'always',
+}
+
+
+def test_get_nonexistent_cifs_local_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['empty_records']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'nogroup',
+ 'member': 'test',
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = 'CIFS local group nogroup does not exist on vserver ansible'
+ assert msg in error
+
+
+def test_get_existent_cifs_local_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('GET', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['group_member_record']),
+ ])
+ cifs_obj = create_module(group_member_module, ARGS_REST)
+ result = cifs_obj.get_cifs_local_group_member()
+ assert result
+
+
+def test_error_get_existent_cifs_local_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('GET', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['generic_error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = 'Error getting CIFS local group members for group BUILTIN\\GUESTS on vserver ansible'
+ assert msg in error
+
+
+def test_add_cifs_group_member_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('GET', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['empty_records']),
+ ('POST', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['empty_good']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_add_cifs_group_member_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('GET', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['empty_records']),
+ ('POST', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['generic_error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error adding member test to cifs local group BUILTIN\\GUESTS on vserver"
+ assert msg in error
+
+
+def test_remove_cifs_group_member_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('GET', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['group_member_record']),
+ ('DELETE', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['empty_good']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_remove_cifs_group_member_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('GET', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['group_member_record']),
+ ('DELETE', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['generic_error']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ 'state': 'absent'
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error removing member test from cifs local group BUILTIN\\GUESTS on vserver ansible"
+ assert msg in error
+
+
+def test_successfully_add_members_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('GET', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['group_member_record']),
+ ])
+ module_args = {
+ 'vserver': 'ansible',
+ 'group': 'BUILTIN\\GUESTS',
+ 'member': 'test',
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_successfully_remove_members_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-groups', SRR['group_record']),
+ ('GET', 'protocols/cifs/local-groups/671aa46e-11ad-11ec-a267-005056b30cfa/'
+ 'S-1-5-21-256008430-3394229847-3930036330-1001/members', SRR['empty_records']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user.py
new file mode 100644
index 000000000..812512a06
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user.py
@@ -0,0 +1,204 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, call_main, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_local_user \
+ import NetAppOntapCifsLocalUser as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
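+# canned REST responses: 'local_user_sid' mimics GET protocols/cifs/local-users, 'svm_uuid' mimics GET svm/svms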
+SRR = rest_responses({
+ 'local_user_sid': (200, {
+ "records": [{
+ "sid": "S-1-5-21-256008430-3394229847-3930036330-1001",
+ "members": [{
+ "name": "string"
+ }],
+ "name": "SMB_SERVER01\\username",
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "description": "This is a local group",
+ "full_name": "User Name",
+ "account_disabled": False
+ }]
+ }, None),
+ 'svm_uuid': (200, {"records": [
+ {
+ 'uuid': 'e3cb5c7f-cd20'
+ }], "num_records": 1}, None),
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'name': "username"
+}
+
+
+def test_low_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ error = create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error: na_ontap_cifs_local_user only supports REST, and requires ONTAP 9.10.1 or later.'
+ assert msg in error
+
+
+def test_get_svm_uuid_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.get_svm_uuid, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error fetching vserver vserver: calling: svm/svms: got Expected error.' == error
+
+
+def test_get_cifs_local_user_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/cifs/local-users', SRR['zero_records']),
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_cifs_local_user() is None
+
+
+def test_get_cifs_local_user_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/cifs/local-users', SRR['generic_error']),
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching cifs/local-user username: calling: protocols/cifs/local-users: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_cifs_local_user, 'fail')['msg']
+
+
+def test_get_cifs_local_user():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/cifs/local-users', SRR['local_user_sid']),
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_cifs_local_user() is not None
+
+
+def test_create_cifs_local_user():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/cifs/local-users', SRR['empty_records']),
+ ('POST', 'protocols/cifs/local-users', SRR['empty_good'])
+ ])
+ module_args = {'name': 'username',
+ 'user_password': 'password',
+ 'account_disabled': 'False',
+ 'full_name': 'User Name',
+ 'description': 'Test user'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_cifs_local_user_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('POST', 'protocols/cifs/local-users', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['name'] = 'username'
+ my_obj.parameters['user_password'] = 'password'
+ my_obj.parameters['account_disabled'] = False
+ my_obj.parameters['full_name'] = 'User Name'
+ my_obj.parameters['description'] = 'This is a local group'
+ error = expect_and_capture_ansible_exception(my_obj.create_cifs_local_user, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating CIFS local users with name username: calling: protocols/cifs/local-users: got Expected error.' == error
+
+
+def test_delete_cifs_local_user():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/cifs/local-users', SRR['local_user_sid']),
+ ('DELETE', 'protocols/cifs/local-users/e3cb5c7f-cd20/S-1-5-21-256008430-3394229847-3930036330-1001', SRR['empty_good'])
+ ])
+ module_args = {'name': 'username',
+ 'state': 'absent',
+ 'user_password': 'password',
+ 'description': 'This is a local group'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_cifs_local_user_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('DELETE', 'protocols/cifs/local-users/e3cb5c7f-cd20/S-1-5-21-256008430-3394229847-3930036330-1001', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ my_obj.sid = 'S-1-5-21-256008430-3394229847-3930036330-1001'
+ my_obj.parameters['name'] = 'username'
+ my_obj.parameters['state'] = 'absent'
+ my_obj.parameters['user_password'] = 'password'
+ my_obj.parameters['description'] = 'This is a local group'
+ error = expect_and_capture_ansible_exception(my_obj.delete_cifs_local_user, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error while deleting CIFS local user: calling: '\
+ 'protocols/cifs/local-users/e3cb5c7f-cd20/S-1-5-21-256008430-3394229847-3930036330-1001: got Expected error.' == error
+
+
+def test_modify_cifs_local_user():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/cifs/local-users', SRR['local_user_sid']),
+ ('PATCH', 'protocols/cifs/local-users/e3cb5c7f-cd20/S-1-5-21-256008430-3394229847-3930036330-1001', SRR['empty_good'])
+ ])
+ module_args = {'name': 'username',
+ 'user_password': 'mypassword',
+ 'description': 'This is a local group2',
+ 'account_disabled': True,
+ 'full_name': 'Full Name'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_cifs_local_user_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('PATCH', 'protocols/cifs/local-users/e3cb5c7f-cd20/S-1-5-21-256008430-3394229847-3930036330-1001', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ my_obj.sid = 'S-1-5-21-256008430-3394229847-3930036330-1001'
+ my_obj.parameters['name'] = 'username'
+ my_obj.parameters['user_password'] = 'mypassword'
+ my_obj.parameters['description'] = 'This is a local group2'
+ current = {'description': 'This is a local group'}
+ error = expect_and_capture_ansible_exception(my_obj.modify_cifs_local_user, 'fail', current)['msg']
+ print('Info: %s' % error)
+ assert 'Error while modifying CIFS local user: calling: '\
+ 'protocols/cifs/local-users/e3cb5c7f-cd20/S-1-5-21-256008430-3394229847-3930036330-1001: got Expected error.' == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_modify.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_modify.py
new file mode 100644
index 000000000..44e75a856
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_modify.py
@@ -0,0 +1,223 @@
+''' unit tests ONTAP Ansible module: na_ontap_cifs_local_user_modify '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_local_user_modify \
+ import NetAppOntapCifsLocalUserModify as cifs_user_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Ooops, the UT needs one more SRR response"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'cifs_user_record': (200, {
+ "records": [{
+ 'vserver': 'ansible',
+ 'user_name': 'ANSIBLE\\Administrator',
+ 'is_account_disabled': False,
+ 'full_name': 'test user',
+ 'description': 'builtin admin'
+ }]
+ }, None)
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'local_user':
+ xml = self.build_local_user_info()
+ elif self.type == 'local_user_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_local_user_info():
+ ''' build xml data for cifs-local-user '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': {
+ 'cifs-local-user': {
+ 'user-name': 'ANSIBLE\\Administrator',
+ 'is-account-disabled': 'false',
+ 'vserver': 'ansible',
+ 'full-name': 'test user',
+ 'description': 'builtin admin'
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
+
+    def set_default_args(self, use_rest=None):
+        # canned arguments shared by all tests; individual tests override entries as needed
+        args = dict({
+            'hostname': '10.10.10.10',
+            'username': 'username',
+            'password': 'password',
+            'vserver': 'ansible',
+            'name': 'ANSIBLE\\Administrator',
+            'is_account_disabled': False
+        })
+
+        if use_rest is not None:
+            args['use_rest'] = use_rest
+
+        return args
+
+ @staticmethod
+ def get_local_user_mock_object(cx_type='zapi', kind=None):
+ local_user_obj = cifs_user_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ local_user_obj.server = MockONTAPConnection()
+ else:
+ local_user_obj.server = MockONTAPConnection(kind=kind)
+ return local_user_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ cifs_user_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called(self):
+        ''' test get_cifs_local_user for a non-existent user '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ print('starting')
+ my_obj = cifs_user_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = self.server
+ assert my_obj.get_cifs_local_user is not None
+
+ def test_ensure_get_called_existing(self):
+        ''' test get_cifs_local_user for an existing user '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = cifs_user_module()
+ my_obj.server = MockONTAPConnection(kind='local_user')
+ assert my_obj.get_cifs_local_user()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_local_user_modify.NetAppOntapCifsLocalUserModify.modify_cifs_local_user')
+ def test_successful_modify(self, modify_cifs_local_user):
+ ''' enabling local cifs user and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['is_account_disabled'] = True
+ set_module_args(data)
+ my_obj = cifs_user_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('local_user')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # to reset na_helper from remembering the previous 'changed' value
+ data = self.set_default_args(use_rest='Never')
+ set_module_args(data)
+ my_obj = cifs_user_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('local_user')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ data = self.set_default_args(use_rest='Never')
+ set_module_args(data)
+ my_obj = cifs_user_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('local_user_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_cifs_local_user(modify={})
+ assert 'Error modifying local CIFS user' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_local_user_mock_object(cx_type='rest').apply()
+ msg = 'calling: private/cli/vserver/cifs/users-and-groups/local-user: got %s.' % SRR['generic_error'][2]
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ data['is_account_disabled'] = True
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cifs_user_record'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_local_user_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cifs_user_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_local_user_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_set_password.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_set_password.py
new file mode 100644
index 000000000..62c8352b7
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_set_password.py
@@ -0,0 +1,66 @@
+# (c) 2021-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_cifs_local_user_set_password '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import call_main, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_local_user_set_password import main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+ZRR = zapi_responses({
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'never',
+ 'user_password': 'test',
+ 'user_name': 'user1',
+ 'vserver': 'svm1',
+}
+
+
+def test_successful_set_password(patch_ansible):
+ ''' successful set '''
+ register_responses([
+ ('ZAPI', 'cifs-local-user-set-password', ZRR['success']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS)['changed']
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ error = 'missing required arguments:'
+ assert error in call_main(my_main, {}, fail=True)['msg']
+
+
+def test_if_all_methods_catch_exception(patch_ansible):
+ register_responses([
+ ('ZAPI', 'cifs-local-user-set-password', ZRR['error']),
+ ])
+ assert 'Error setting password ' in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ error = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert error in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_set_password_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_set_password_rest.py
new file mode 100644
index 000000000..f32a1b2fa
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_local_user_set_password_rest.py
@@ -0,0 +1,101 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ patch_ansible, call_main, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_local_user_set_password \
+ import NetAppONTAPCifsSetPassword as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
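+# canned REST responses for the svm/svms and protocols/cifs/local-users lookups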
+SRR = rest_responses({
+ 'svm_uuid': (200, {"records": [
+ {
+ 'uuid': 'e3cb5c7f-cd20'
+ }], "num_records": 1}, None),
+ 'local_user_sid': (200, {"records": [{'sid': '1234-sd'}]}, None)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'user_name': 'carchi8py',
+ 'user_password': 'p@SSWord',
+ 'vserver': 'vserver'
+}
+
+
+def test_change_password():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/cifs/local-users', SRR['local_user_sid']),
+ ('PATCH', 'protocols/cifs/local-users/e3cb5c7f-cd20/1234-sd', SRR['empty_good'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {})['changed']
+
+
+def test_get_svm_uuid_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.get_svm_uuid, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error fetching vserver vserver: calling: svm/svms: got Expected error.' == error
+
+
+def test_get_cifs_local_users_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/local-users', SRR['generic_error']),
+ # 2nd call
+ ('GET', 'protocols/cifs/local-users', SRR['zero_records']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.get_user_sid, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error fetching cifs/local-user carchi8py: calling: protocols/cifs/local-users: got Expected error.' == error
+ # no user
+ error = 'Error no cifs/local-user with name carchi8py'
+ assert error in expect_and_capture_ansible_exception(my_obj.get_user_sid, 'fail')['msg']
+
+
+def test_patch_cifs_local_users_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/cifs/local-users', SRR['local_user_sid']),
+ ('PATCH', 'protocols/cifs/local-users/e3cb5c7f-cd20/1234-sd', SRR['generic_error']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.cifs_local_set_passwd_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error change password for user carchi8py: calling: protocols/cifs/local-users/e3cb5c7f-cd20/1234-sd: got Expected error.' == error
+
+
+def test_fail_old_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always'
+ }
+ error = 'Error: REST requires ONTAP 9.10.1 or later for protocols/cifs/local-users APIs.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_server.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_server.py
new file mode 100644
index 000000000..820c33d17
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cifs_server.py
@@ -0,0 +1,770 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_cifs_server '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cifs_server \
+ import NetAppOntapcifsServer as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'cifs_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "enabled": True,
+ "security": {
+ "encrypt_dc_connection": False,
+ "smb_encryption": False,
+ "kdc_encryption": False,
+ "smb_signing": False,
+ "restrict_anonymous": "no_enumeration",
+ "aes_netlogon_enabled": False,
+ "ldap_referral_enabled": False,
+ "session_security": "none",
+ "try_ldap_channel_binding": True,
+ "use_ldaps": False,
+ "use_start_tls": False
+ },
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ },
+ "name": "cifs_server_name"
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ 'cifs_record_disabled': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "enabled": False,
+ "security": {
+ "encrypt_dc_connection": False,
+ "smb_encryption": False,
+ "kdc_encryption": False,
+ "smb_signing": False,
+ "restrict_anonymous": "no_enumeration",
+ "aes_netlogon_enabled": False,
+ "ldap_referral_enabled": False,
+ "session_security": "none",
+ "try_ldap_channel_binding": True,
+ "use_ldaps": False,
+ "use_start_tls": False
+ },
+ "target": {
+                        "name": "20:05:00:50:56:b3:0c:fa"
+ },
+ "name": "cifs_server_name"
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ 'cifs_records_renamed': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "enabled": True,
+ "security": {
+ "encrypt_dc_connection": False,
+ "smb_encryption": False,
+ "kdc_encryption": False,
+ "smb_signing": False,
+ "restrict_anonymous": "no_enumeration",
+ "aes_netlogon_enabled": False,
+ "ldap_referral_enabled": False,
+ "session_security": "none",
+ "try_ldap_channel_binding": True,
+ "use_ldaps": False,
+ "use_start_tls": False
+ },
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ },
+ "name": "cifs"
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
+
+
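+# ZAPI canned records: administrative-status 'up'/'down' corresponds to service_state started/stopped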
+cifs_record_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'cifs-server-config': {
+ 'cifs-server': 'cifs_server',
+ 'administrative-status': 'up'}
+ }
+}
+cifs_record_disabled_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'cifs-server-config': {
+ 'cifs-server': 'cifs_server',
+ 'administrative-status': 'down'}
+ }
+}
+
+ZRR = zapi_responses({
+ 'cifs_record_info': build_zapi_response(cifs_record_info),
+ 'cifs_record_disabled_info': build_zapi_response(cifs_record_disabled_info)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'cifs_server_name': 'cifs_server',
+ 'vserver': 'vserver',
+ 'use_rest': 'never',
+ 'feature_flags': {'no_cserver_ems': True}
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_get():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['cifs_record_info'])
+ ])
+ cifs_obj = create_module(my_module, DEFAULT_ARGS)
+ result = cifs_obj.get_cifs_server()
+ assert result
+
+
+def test_create_unsupported_zapi():
+ """ check for zapi unsupported options """
+ module_args = {
+ "use_rest": "never",
+ "encrypt_dc_connection": "false",
+ "smb_encryption": "false",
+ "kdc_encryption": "false",
+ "smb_signing": "false"
+ }
+ msg = 'Error: smb_signing ,encrypt_dc_connection ,kdc_encryption ,smb_encryption options supported only with REST.'
+ assert msg == create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_create():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['empty']),
+ ('cifs-server-create', ZRR['success'])
+ ])
+ module_args = {
+ 'workgroup': 'test',
+ 'ou': 'ou',
+ 'domain': 'test',
+ 'admin_user_name': 'user1',
+ 'admin_password': 'password'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_with_service_state_started():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['empty']),
+ ('cifs-server-create', ZRR['success']),
+ # idempotent check
+ ('cifs-server-get-iter', ZRR['cifs_record_info'])
+ ])
+ module_args = {
+ 'workgroup': 'test',
+ 'ou': 'ou',
+ 'domain': 'test',
+ 'admin_user_name': 'user1',
+ 'admin_password': 'password',
+ 'service_state': 'started'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_with_service_state_stopped():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['empty']),
+ ('cifs-server-create', ZRR['success']),
+ # idempotent check
+ ('cifs-server-get-iter', ZRR['cifs_record_disabled_info'])
+ ])
+ module_args = {
+ 'workgroup': 'test',
+ 'ou': 'ou',
+ 'domain': 'test',
+ 'admin_user_name': 'user1',
+ 'admin_password': 'password',
+ 'service_state': 'stopped'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_with_force():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['empty']),
+ ('cifs-server-create', ZRR['success']),
+ ])
+ module_args = {
+ 'workgroup': 'test',
+ 'ou': 'ou',
+ 'domain': 'test',
+ 'admin_user_name': 'user1',
+ 'admin_password': 'password',
+ 'force': 'true'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_idempotent():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['cifs_record_info'])
+ ])
+ module_args = {
+ 'state': 'present'
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_idempotent():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['empty'])
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['cifs_record_info']),
+ ('cifs-server-delete', ZRR['success']),
+ ])
+ module_args = {
+ 'workgroup': 'test',
+ 'ou': 'ou',
+ 'domain': 'test',
+ 'admin_user_name': 'user1',
+ 'admin_password': 'password',
+ 'force': 'false',
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_stop_service_state():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['cifs_record_info']),
+ ('cifs-server-stop', ZRR['success']),
+ ])
+ module_args = {
+ 'service_state': 'stopped'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)
+
+
+def test_start_service_state():
+ register_responses([
+ ('cifs-server-get-iter', ZRR['cifs_record_disabled_info']),
+ ('cifs-server-start', ZRR['success']),
+ ])
+ module_args = {
+ 'service_state': 'started'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('cifs-server-create', ZRR['error']),
+ ('cifs-server-start', ZRR['error']),
+ ('cifs-server-stop', ZRR['error']),
+ ('cifs-server-delete', ZRR['error'])
+ ])
+ module_args = {}
+
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.create_cifs_server, 'fail')['msg']
+ assert 'Error Creating cifs_server cifs_server: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.start_cifs_server, 'fail')['msg']
+ assert 'Error modifying cifs_server cifs_server: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.stop_cifs_server, 'fail')['msg']
+ assert 'Error modifying cifs_server cifs_server: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_cifs_server, 'fail')['msg']
+ assert 'Error deleting cifs_server cifs_server: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+
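+# REST connection arguments; 'use_rest': 'always' switches the module to the REST code path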
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'test_vserver',
+ 'name': 'cifs_server_name',
+}
+
+
+def test_rest_error_get():
+ '''Test error rest get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on fetching cifs:' in error
+
+
+def test_module_error_ontap_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ module_args = {'use_rest': 'always', 'force': True}
+ error = create_module(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Minimum version of ONTAP for force is (9, 11)' in error
+
+
+def test_rest_successful_create():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('POST', 'protocols/cifs/services', SRR['empty_good']),
+ ])
+ assert create_and_apply(my_module, ARGS_REST)
+
+
+def test_rest_successful_create_with_force():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('POST', 'protocols/cifs/services', SRR['empty_good']),
+ ])
+ module_args = {
+ 'force': True
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_rest_successful_create_with_user():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('POST', 'protocols/cifs/services', SRR['empty_good']),
+ # idempotent check.
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ])
+ module_args = {
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+ assert not create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_create_with_service_state():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('POST', 'protocols/cifs/services', SRR['empty_good']),
+ # idempotent check.
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record_disabled']),
+ ])
+ module_args = {
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd',
+ 'service_state': 'stopped'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+ assert not create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_create_with_ou():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('POST', 'protocols/cifs/services', SRR['empty_good']),
+ ])
+ module_args = {
+ 'ou': 'ou'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_create_with_domain():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('POST', 'protocols/cifs/services', SRR['empty_good']),
+ ])
+ module_args = {
+ 'domain': 'domain'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_create_with_security():
+ '''Test successful rest create with security settings'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('POST', 'protocols/cifs/services', SRR['empty_good']),
+ ])
+ module_args = {
+ 'smb_encryption': True,
+ 'smb_signing': True,
+ 'kdc_encryption': True,
+ 'encrypt_dc_connection': True,
+ 'restrict_anonymous': 'no_enumeration'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_version_error_with_security_encryption():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'encrypt_dc_connection': True,
+ }
+ error = create_module(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Minimum version of ONTAP for encrypt_dc_connection is (9, 8)' in error
+
+
+def test_module_error_ontap_version_security():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0'])
+ ])
+ module_args = {
+ "aes_netlogon_enabled": False
+ }
+ error = create_module(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Minimum version of ONTAP for aes_netlogon_enabled is (9, 10, 1)' in error
+
+
+def test_rest_error_create():
+ '''Test error rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('POST', 'protocols/cifs/services', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on creating cifs:' in error
+
+
+def test_delete_rest():
+ ''' Test delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ('DELETE', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_delete_with_force_rest():
+ ''' Test delete with force using rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ('DELETE', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'force': True,
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_delete_rest():
+ ''' Test error delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ('DELETE', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on deleting cifs server:' in error
+
+
+def test_rest_successful_disable():
+ '''Test successful rest disable'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ 'service_state': 'stopped'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_enable():
+ '''Test successful rest enable'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record_disabled']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ 'service_state': 'started'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_security_modify():
+ '''Test successful rest modify of security settings'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record_disabled']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ 'smb_encryption': True,
+ 'smb_signing': True,
+ 'kdc_encryption': True,
+ 'restrict_anonymous': "no_enumeration"
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_security_modify_encrypt():
+ '''Test successful rest modify of encrypt_dc_connection'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record_disabled']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ 'encrypt_dc_connection': True
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_negative_security_options_modify():
+ '''Test error: use_ldaps and use_start_tls are mutually exclusive'''
+ register_responses([
+ ])
+ module_args = {
+ "aes_netlogon_enabled": True,
+ "ldap_referral_enabled": True,
+ "session_security": "seal",
+ "try_ldap_channel_binding": False,
+ "use_ldaps": True,
+ "use_start_tls": True
+ }
+ msg = 'parameters are mutually exclusive: use_ldaps|use_start_tls'
+ assert msg in create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_successful_security_options_modify():
+ '''Test successful rest modify of security options'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record_disabled']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ "aes_netlogon_enabled": True,
+ "ldap_referral_enabled": True,
+ "session_security": "seal",
+ "try_ldap_channel_binding": False,
+ "use_ldaps": True
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_rename_cifs():
+ '''Test successful rest rename'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record_disabled']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good'])
+ ])
+ module_args = {
+ 'from_name': 'cifs_server_name',
+ 'name': 'cifs',
+ 'force': True,
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_rename_modify_cifs():
+ '''Test successful rest rename and modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good'])
+ ])
+ module_args = {
+ 'from_name': 'cifs_server_name',
+ 'name': 'cifs',
+ 'force': True,
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd',
+ 'service_state': 'stopped'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_rest_rename_cifs_without_force():
+ '''Test error rest rename with force false'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ])
+ module_args = {
+ 'from_name': 'cifs_servers',
+ 'name': 'cifs1',
+ 'force': False,
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error: cannot rename cifs server from cifs_servers to cifs1 without force.' in error
+
+
+def test_error_rest_rename_error_state():
+ '''Test error rest rename with service state as started'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error']),
+ ])
+ module_args = {
+ 'from_name': 'cifs_servers',
+ 'name': 'cifs1',
+ 'force': True,
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd',
+ 'service_state': 'started'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = 'Error on modifying cifs server: calling: protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa:'
+ assert msg in error
+
+
+def test_error_rest_rename_cifs():
+ '''Test error rest rename'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records']),
+ ])
+ module_args = {
+ 'from_name': 'cifs_servers_test',
+ 'name': 'cifs1',
+ 'force': True,
+ 'admin_user_name': 'test_user',
+ 'admin_password': 'pwd'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error renaming cifs server: cifs1 - no cifs server with from_name: cifs_servers_test' in error
+
+
+def test_rest_error_disable():
+ '''Test error rest disable'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record']),
+ ('PATCH', 'protocols/cifs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error']),
+ ])
+ module_args = {
+ 'service_state': 'stopped'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on modifying cifs server:' in error
+
+
+def test_rest_successful_create_idempotency():
+ '''Test rest create idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['cifs_record'])
+ ])
+ module_args = {'use_rest': 'always'}
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed'] is False
+
+
+def test_rest_successful_delete_idempotency():
+ '''Test rest delete idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/cifs/services', SRR['empty_records'])
+ ])
+ module_args = {'use_rest': 'always', 'state': 'absent'}
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed'] is False
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster.py
new file mode 100644
index 000000000..89fe069a3
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster.py
@@ -0,0 +1,688 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_cluster '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock, call
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster \
+ import NetAppONTAPCluster as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
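+ # 'kind' (saved as self.type) selects which canned XML payload, or which simulated ZAPI error,
+ # invoke_successfully returns below.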
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'cluster':
+ xml = self.build_cluster_info()
+ if self.type == 'cluster_success':
+ xml = self.build_cluster_info_success()
+ elif self.type == 'cluster_add':
+ xml = self.build_add_node_info()
+ elif self.type == 'cluster_extra_input':
+ self.type = 'cluster' # success on second call
+ raise netapp_utils.zapi.NaApiError(code='TEST1', message="Extra input: single-node-cluster")
+ elif self.type == 'cluster_extra_input_loop':
+ raise netapp_utils.zapi.NaApiError(code='TEST2', message="Extra input: single-node-cluster")
+ elif self.type == 'cluster_extra_input_other':
+ raise netapp_utils.zapi.NaApiError(code='TEST3', message="Extra input: other-unexpected-element")
+ elif self.type == 'cluster_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST4', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ def autosupport_log(self):
+ ''' mock autosupport log'''
+ return None
+
+ @staticmethod
+ def build_cluster_info():
+ ''' build xml data for cluster-create-join-progress-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'attributes': {
+ 'cluster-create-join-progress-info': {
+ 'is-complete': 'true',
+ 'status': 'whatever'
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+ @staticmethod
+ def build_cluster_info_success():
+ ''' build xml data for cluster-create-join-progress-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'attributes': {
+ 'cluster-create-join-progress-info': {
+ 'is-complete': 'false',
+ 'status': 'success'
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+ @staticmethod
+ def build_add_node_info():
+ ''' build xml data for cluster-create-add-node-status-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'attributes-list': {
+ 'cluster-create-add-node-status-info': {
+ 'failure-msg': '',
+ 'status': 'success'
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.use_vsim = False
+
+ def set_default_args(self, use_rest='never'):
+ hostname = '10.10.10.10'
+ username = 'admin'
+ password = 'password'
+ cluster_name = 'abc'
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'cluster_name': cluster_name,
+ 'use_rest': use_rest
+ })
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ @patch('time.sleep')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ def test_ensure_apply_for_cluster_called(self, get_cl_id, sleep_mock):
+ ''' creating cluster and checking idempotency '''
+ get_cl_id.return_value = None
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ assert exc.value.args[0]['changed']
+
+ @patch('time.sleep')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.create_cluster')
+ def test_cluster_create_called(self, cluster_create, get_cl_id, sleep_mock):
+ ''' creating cluster'''
+ get_cl_id.return_value = None
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_success')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ cluster_create.assert_called_with()
+
+ @patch('time.sleep')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ def test_cluster_create_old_api(self, get_cl_id, sleep_mock):
+ ''' creating cluster when the API rejects single-node-cluster as extra input, then succeeds'''
+ get_cl_id.return_value = None
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_extra_input')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ def test_cluster_create_old_api_loop(self, get_cl_id):
+ ''' creating cluster fails when the single-node-cluster extra input error persists'''
+ get_cl_id.return_value = None
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_extra_input_loop')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = 'TEST2:Extra input: single-node-cluster'
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ def test_cluster_create_old_api_other_extra(self, get_cl_id):
+ ''' creating cluster fails on an unexpected extra input error'''
+ get_cl_id.return_value = None
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_extra_input_other')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = 'TEST3:Extra input: other-unexpected-element'
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('time.sleep')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_ip_addresses')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.add_node')
+ def test_add_node_called(self, add_node, get_cl_id, get_cl_ips, sleep_mock):
+ ''' creating add_node'''
+ get_cl_ips.return_value = []
+ get_cl_id.return_value = None
+ data = self.set_default_args()
+ del data['cluster_name']
+ data['cluster_ip_address'] = '10.10.10.10'
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_add')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ add_node.assert_called_with()
+ assert exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_cluster()
+ assert 'Error creating cluster' in exc.value.args[0]['msg']
+ data = self.set_default_args()
+ data['cluster_ip_address'] = '10.10.10.10'
+ set_module_args(data)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.add_node()
+ assert 'Error adding node with ip' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_ip_addresses')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.add_node')
+ def test_add_node_idempotent(self, add_node, get_cl_id, get_cl_ips):
+ ''' adding a node, idempotency'''
+ get_cl_ips.return_value = ['10.10.10.10']
+ get_cl_id.return_value = None
+ data = self.set_default_args()
+ del data['cluster_name']
+ data['cluster_ip_address'] = '10.10.10.10'
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_add')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ try:
+ add_node.assert_not_called()
+ except AttributeError:
+ # not supported with python <= 3.4
+ pass
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_ip_addresses')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.remove_node')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.node_remove_wait')
+ def test_remove_node_ip(self, wait, remove_node, get_cl_id, get_cl_ips):
+ ''' removing a node by cluster_ip_address'''
+ get_cl_ips.return_value = ['10.10.10.10']
+ get_cl_id.return_value = None
+ wait.return_value = None
+ data = self.set_default_args()
+ # del data['cluster_name']
+ data['cluster_ip_address'] = '10.10.10.10'
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_add')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ remove_node.assert_called_with()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_ip_addresses')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.remove_node')
+ def test_remove_node_ip_idempotent(self, remove_node, get_cl_id, get_cl_ips):
+ ''' removing a node by cluster_ip_address, idempotency'''
+ get_cl_ips.return_value = []
+ get_cl_id.return_value = None
+ data = self.set_default_args()
+ # del data['cluster_name']
+ data['cluster_ip_address'] = '10.10.10.10'
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_add')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ try:
+ remove_node.assert_not_called()
+ except AttributeError:
+ # not supported with python <= 3.4
+ pass
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_nodes')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.remove_node')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.node_remove_wait')
+ def test_remove_node_name(self, wait, remove_node, get_cl_id, get_cl_nodes):
+ ''' removing a node by name'''
+ get_cl_nodes.return_value = ['node1', 'node2']
+ get_cl_id.return_value = None
+ wait.return_value = None
+ data = self.set_default_args()
+ # del data['cluster_name']
+ data['node_name'] = 'node2'
+ data['state'] = 'absent'
+ data['force'] = True
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_add')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ remove_node.assert_called_with()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_nodes')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.get_cluster_identity')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster.NetAppONTAPCluster.remove_node')
+ def test_remove_node_name_idempotent(self, remove_node, get_cl_id, get_cl_nodes):
+ ''' removing a node by name, idempotency'''
+ get_cl_nodes.return_value = ['node1', 'node2']
+ get_cl_id.return_value = None
+ data = self.set_default_args()
+ # del data['cluster_name']
+ data['node_name'] = 'node3'
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.autosupport_log = Mock(return_value=None)
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('cluster_add')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_cluster_apply: %s' % repr(exc.value))
+ try:
+ remove_node.assert_not_called()
+ except AttributeError:
+ # not supported with python <= 3.4
+ pass
+ assert not exc.value.args[0]['changed']
+
+ def test_remove_node_name_and_id(self):
+ ''' cluster_ip_address and node_name are mutually exclusive when state is absent'''
+ data = self.set_default_args()
+ # del data['cluster_name']
+ data['cluster_ip_address'] = '10.10.10.10'
+ data['node_name'] = 'node3'
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ print('Info: test_remove_node_name_and_id: %s' % repr(exc.value))
+ msg = 'when state is "absent", parameters are mutually exclusive: cluster_ip_address|node_name'
+ assert msg in exc.value.args[0]['msg']
+
+
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy_9_7_0')), None),
+ 'is_rest_95': (200, dict(version=dict(generation=9, major=5, minor=0, full='dummy_9_5_0')), None),
+ 'is_rest_96': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy_9_6_0')), None),
+ 'is_rest_97': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy_9_7_0')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': ({}, None, None),
+ 'zero_record': (200, {'records': []}, None),
+ 'precluster': (500, None, {'message': 'are available in precluster.'}),
+ 'cluster_identity': (200, {'location': 'Oz', 'name': 'abc'}, None),
+ 'nodes': (200, {'records': [
+ {'name': 'node2', 'uuid': 'uuid2', 'cluster_interfaces': [{'ip': {'address': '10.10.10.2'}}]}
+ ]}, None),
+ 'end_of_sequence': (None, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+}
+
+
+def set_default_args(use_rest='auto'):
+ hostname = '10.10.10.10'
+ username = 'admin'
+ password = 'password'
+ cluster_name = 'abc'
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'cluster_name': cluster_name,
+ 'use_rest': use_rest
+ })
+
+
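+# The REST tests below patch OntapRestAPI.send_request directly and feed it SRR tuples through
+# side_effect, instead of using the MockONTAPConnection server object as the ZAPI tests above do.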
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create(mock_request, patch_ansible):
+ ''' create cluster '''
+ args = dict(set_default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['precluster'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create_timezone(mock_request, patch_ansible):
+ ''' create cluster with timezone '''
+ args = dict(set_default_args())
+ args['timezone'] = {'name': 'America/Los_Angeles'}
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['precluster'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create_single(mock_request, patch_ansible):
+ ''' create single node cluster '''
+ args = dict(set_default_args())
+ args['single_node_cluster'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['precluster'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+ post_call = call('POST', 'cluster', {'return_timeout': 30, 'single_node_cluster': True}, json={'name': 'abc'}, headers=None, files=None)
+ assert post_call in mock_request.mock_calls
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_modify(mock_request, patch_ansible):
+ ''' modify cluster location '''
+ args = dict(set_default_args())
+ args['cluster_location'] = 'Mars'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cluster_identity'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed'] is True
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_modify_timezone(mock_request, patch_ansible):
+ ''' modify cluster location and timezone '''
+ args = dict(set_default_args())
+ args['timezone'] = {'name': 'America/Los_Angeles'}
+ args['cluster_location'] = 'Mars'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cluster_identity'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed'] is True
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_modify_idempotent(mock_request, patch_ansible):
+ ''' modify cluster location, idempotency '''
+ args = dict(set_default_args())
+ args['cluster_location'] = 'Oz'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cluster_identity'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed'] is False
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_add_node(mock_request, patch_ansible):
+ ''' add a node to the cluster '''
+ args = dict(set_default_args())
+ args['node_name'] = 'node2'
+ args['cluster_ip_address'] = '10.10.10.2'
+
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cluster_identity'], # get
+ SRR['zero_record'], # get nodes
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed'] is True
+ assert len(mock_request.mock_calls) == 4
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_remove_node_by_ip(mock_request, patch_ansible):
+ ''' remove a node by cluster_ip_address '''
+ args = dict(set_default_args())
+ # args['node_name'] = 'node2'
+ args['cluster_ip_address'] = '10.10.10.2'
+ args['state'] = 'absent'
+
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cluster_identity'], # get
+ SRR['nodes'], # get nodes
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed'] is True
+ assert len(mock_request.mock_calls) == 4
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_remove_node_by_ip_idem(mock_request, patch_ansible):
+ ''' remove a node by cluster_ip_address, idempotency '''
+ args = dict(set_default_args())
+ # args['node_name'] = 'node2'
+ args['cluster_ip_address'] = '10.10.10.3'
+ args['state'] = 'absent'
+
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cluster_identity'], # get
+ SRR['nodes'], # get nodes
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed'] is False
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_remove_node_by_name(mock_request, patch_ansible):
+ ''' remove a node by name '''
+ args = dict(set_default_args())
+ args['node_name'] = 'node2'
+ # args['cluster_ip_address'] = '10.10.10.2'
+ args['state'] = 'absent'
+ args['force'] = True
+
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cluster_identity'], # get
+ SRR['nodes'], # get nodes
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed'] is True
+ assert len(mock_request.mock_calls) == 4
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_remove_node_by_name_idem(mock_request, patch_ansible):
+ ''' remove a node by name, idempotency '''
+ args = dict(set_default_args())
+ args['node_name'] = 'node3'
+ # args['cluster_ip_address'] = '10.10.10.2'
+ args['state'] = 'absent'
+
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['cluster_identity'], # get
+ SRR['nodes'], # get nodes
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed'] is False
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_remove_node_by_name_rest_96(mock_request, patch_ansible):
+ ''' revert to ZAPI for 9.6 '''
+ args = dict(set_default_args())
+ args['node_name'] = 'node3'
+ # args['cluster_ip_address'] = '10.10.10.2'
+ args['state'] = 'absent'
+
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_96'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ # revert to ZAPI for 9.6
+ assert not my_obj.use_rest
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster_ha.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster_ha.py
new file mode 100644
index 000000000..a03f5c5aa
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster_ha.py
@@ -0,0 +1,140 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_cluster_ha '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster_ha \
+ import NetAppOntapClusterHA as cluster_ha # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+DEFAULT_ARGS = {
+ 'hostname': '10.10.10.10',
+ 'username': 'user',
+ 'password': 'pass',
+ 'state': 'present',
+ 'use_rest': 'never'
+}
+
+cluster_ha_enabled = {
+ 'attributes': {
+ 'cluster-ha-info': {'ha-configured': 'true'}
+ }
+}
+
+cluster_ha_disabled = {
+ 'attributes': {
+ 'cluster-ha-info': {'ha-configured': 'false'}
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'cluster_ha_enabled': build_zapi_response(cluster_ha_enabled),
+ 'cluster_ha_disabled': build_zapi_response(cluster_ha_disabled)
+})
+
+
+SRR = rest_responses({
+ 'cluster_ha_enabled': (200, {"records": [{
+ 'configured': True
+ }], "num_records": 1}, None),
+ 'cluster_ha_disabled': (200, {"records": [{
+ 'configured': False
+ }], "num_records": 1}, None)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname"]
+ error = create_module(cluster_ha, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_enable_cluster_ha():
+ ''' enable cluster ha '''
+ register_responses([
+ ('cluster-ha-get', ZRR['cluster_ha_disabled']),
+ ('cluster-ha-modify', ZRR['success']),
+ ('cluster-ha-get', ZRR['cluster_ha_enabled'])
+ ])
+ assert create_and_apply(cluster_ha, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(cluster_ha, DEFAULT_ARGS)['changed']
+
+
+def test_disable_cluster_ha():
+ ''' disable cluster ha '''
+ register_responses([
+ ('cluster-ha-get', ZRR['cluster_ha_enabled']),
+ ('cluster-ha-modify', ZRR['success']),
+ ('cluster-ha-get', ZRR['cluster_ha_disabled']),
+ ])
+ assert create_and_apply(cluster_ha, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(cluster_ha, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('cluster-ha-get', ZRR['error']),
+ ('cluster-ha-modify', ZRR['error']),
+ ('cluster-ha-modify', ZRR['error']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'private/cli/cluster/ha', SRR['generic_error']),
+ ('PATCH', 'private/cli/cluster/ha', SRR['generic_error']),
+ ('PATCH', 'private/cli/cluster/ha', SRR['generic_error'])
+ ])
+ ha_obj = create_module(cluster_ha, DEFAULT_ARGS)
+ assert 'Error fetching cluster HA' in expect_and_capture_ansible_exception(ha_obj.get_cluster_ha_enabled, 'fail')['msg']
+ assert 'Error modifying cluster HA to true' in expect_and_capture_ansible_exception(ha_obj.modify_cluster_ha, 'fail', 'true')['msg']
+ assert 'Error modifying cluster HA to false' in expect_and_capture_ansible_exception(ha_obj.modify_cluster_ha, 'fail', 'false')['msg']
+
+ ucm_obj = create_module(cluster_ha, DEFAULT_ARGS, {'use_rest': 'always'})
+ assert 'Error fetching cluster HA' in expect_and_capture_ansible_exception(ucm_obj.get_cluster_ha_enabled, 'fail')['msg']
+ assert 'Error modifying cluster HA to true' in expect_and_capture_ansible_exception(ucm_obj.modify_cluster_ha, 'fail', 'true')['msg']
+ assert 'Error modifying cluster HA to false' in expect_and_capture_ansible_exception(ucm_obj.modify_cluster_ha, 'fail', 'false')['msg']
+
+
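+# The REST tests below exercise the same enable/disable logic through the private CLI passthrough
+# endpoint (private/cli/cluster/ha).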
+def test_enable_cluster_ha_rest():
+ ''' enable cluster ha in rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'private/cli/cluster/ha', SRR['cluster_ha_disabled']),
+ ('PATCH', 'private/cli/cluster/ha', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'private/cli/cluster/ha', SRR['cluster_ha_enabled'])
+ ])
+ assert create_and_apply(cluster_ha, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+ assert not create_and_apply(cluster_ha, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+
+
+def test_disable_cluster_ha_rest():
+ ''' disable cluster ha in rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'private/cli/cluster/ha', SRR['cluster_ha_enabled']),
+ ('PATCH', 'private/cli/cluster/ha', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'private/cli/cluster/ha', SRR['cluster_ha_disabled']),
+ ])
+ args = {'use_rest': 'always', 'state': 'absent'}
+ assert create_and_apply(cluster_ha, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(cluster_ha, DEFAULT_ARGS, args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster_peer.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster_peer.py
new file mode 100644
index 000000000..7551a619e
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_cluster_peer.py
@@ -0,0 +1,305 @@
+''' unit tests for ONTAP Ansible module: na_ontap_cluster_peer '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, patch_ansible, create_and_apply
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cluster_peer \
+ import NetAppONTAPClusterPeer as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def update_cluster_peer_info_zapi(cluster_name, peer_addresses):
+ return {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'cluster-peer-info': {
+ 'cluster-name': cluster_name,
+ 'peer-addresses': peer_addresses
+ }
+ }
+ }
+
+
+ZRR = zapi_responses({
+ 'cluster_peer_info_source': build_zapi_response(update_cluster_peer_info_zapi('cluster1', '1.2.3.6,1.2.3.7')),
+ 'cluster_peer_info_remote': build_zapi_response(update_cluster_peer_info_zapi('cluster2', '1.2.3.4,1.2.3.5'))
+})
+
+
+DEFAULT_ARGS_ZAPI = {
+ 'source_intercluster_lifs': '1.2.3.4,1.2.3.5',
+ 'dest_intercluster_lifs': '1.2.3.6,1.2.3.7',
+ 'passphrase': 'netapp123',
+ 'dest_hostname': '10.20.30.40',
+ 'dest_cluster_name': 'cluster2',
+ 'encryption_protocol_proposed': 'none',
+ 'ipspace': 'Default',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'never',
+ 'feature_flags': {'no_cserver_ems': True}
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('cluster-peer-get-iter', ZRR['empty']),
+ ('cluster-peer-get-iter', ZRR['empty']),
+ ('cluster-peer-create', ZRR['empty']),
+ ('cluster-peer-create', ZRR['empty'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS_ZAPI)
+
+
+def test_create_idempotency():
+ ''' Test create idempotency '''
+ register_responses([
+ ('cluster-peer-get-iter', ZRR['cluster_peer_info_source']),
+ ('cluster-peer-get-iter', ZRR['cluster_peer_info_remote'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS_ZAPI)
+
+
+def test_successful_delete():
+ ''' Test delete existing cluster peer '''
+ module_args = {
+ 'state': 'absent',
+ 'source_cluster_name': 'cluster1'
+ }
+ register_responses([
+ ('cluster-peer-get-iter', ZRR['cluster_peer_info_source']),
+ ('cluster-peer-get-iter', ZRR['cluster_peer_info_remote']),
+ ('cluster-peer-delete', ZRR['empty']),
+ ('cluster-peer-delete', ZRR['empty'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS_ZAPI, module_args)
+
+
+def test_delete_idempotency():
+ ''' Test delete idempotency '''
+ module_args = {
+ 'state': 'absent',
+ 'source_cluster_name': 'cluster1'
+ }
+ register_responses([
+ ('cluster-peer-get-iter', ZRR['empty']),
+ ('cluster-peer-get-iter', ZRR['empty'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS_ZAPI, module_args)
+
+
+def test_error_get_cluster_peer():
+ ''' Test get error '''
+ register_responses([
+ ('cluster-peer-get-iter', ZRR['error']),
+ ])
+ error = create_and_apply(my_module, DEFAULT_ARGS_ZAPI, fail=True)['msg']
+ assert 'Error fetching cluster peer source: NetApp API failed. Reason - 12345:synthetic error for UT purpose' == error
+
+
+def test_error_delete_cluster_peer():
+ ''' Test delete error '''
+ module_args = {
+ 'state': 'absent',
+ 'source_cluster_name': 'cluster1'
+ }
+ register_responses([
+ ('cluster-peer-get-iter', ZRR['cluster_peer_info_source']),
+ ('cluster-peer-get-iter', ZRR['cluster_peer_info_remote']),
+ ('cluster-peer-delete', ZRR['error'])
+ ])
+ error = create_and_apply(my_module, DEFAULT_ARGS_ZAPI, module_args, fail=True)['msg']
+ assert 'Error deleting cluster peer cluster2: NetApp API failed. Reason - 12345:synthetic error for UT purpose' == error
+
+
+def test_error_create_cluster_peer():
+ ''' Test create error '''
+ register_responses([
+ ('cluster-peer-get-iter', ZRR['empty']),
+ ('cluster-peer-get-iter', ZRR['empty']),
+ ('cluster-peer-create', ZRR['error'])
+ ])
+ error = create_and_apply(my_module, DEFAULT_ARGS_ZAPI, fail=True)['msg']
+ assert 'Error creating cluster peer [\'1.2.3.6\', \'1.2.3.7\']: NetApp API failed. Reason - 12345:synthetic error for UT purpose' == error
+
+
+SRR = rest_responses({
+ 'cluster_peer_dst': (200, {"records": [
+ {
+ "uuid": "1e698aba-2aa6-11ec-b7be-005056b366e1",
+ "name": "mohan9cluster2",
+ "remote": {
+ "name": "mohan9cluster2",
+ "serial_number": "1-80-000011",
+ "ip_addresses": ["10.193.179.180"]
+ }
+ }
+ ], "num_records": 1}, None),
+ 'cluster_peer_src': (200, {"records": [
+ {
+ "uuid": "1fg98aba-2aa6-11ec-b7be-005fgvb366e1",
+ "name": "mohanontap98cluster",
+ "remote": {
+ "name": "mohanontap98cluster",
+ "serial_number": "1-80-000031",
+ "ip_addresses": ["10.193.179.57"]
+ }
+ }
+ ], "num_records": 1}, None),
+ 'passphrase_response': (200, {"records": [
+ {
+ "uuid": "4b71a7fb-45ff-11ec-95ea-005056b3b297",
+ "name": "",
+ "authentication": {
+ "passphrase": "ajdHOvAFSs0LOO0S27GtJZfV",
+ "expiry_time": "2022-02-22T22:30:18-05:00"
+ }
+ }
+ ], "num_records": 1}, None)
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always',
+ 'source_cluster_name': 'mohan9cluster2',
+ 'source_intercluster_lifs': ['10.193.179.180'],
+ 'dest_hostname': '10.193.179.197',
+ 'dest_cluster_name': 'mohanontap98cluster',
+ 'dest_intercluster_lifs': ['10.193.179.57'],
+ 'passphrase': 'ontapcluster_peer',
+ 'encryption_protocol_proposed': 'none',
+ 'ipspace': 'Default'
+}
+
+
+def test_successful_create_rest():
+ ''' Test successful create '''
+ args = dict(DEFAULT_ARGS) # copy, so DEFAULT_ARGS is not mutated for later tests
+ del args['encryption_protocol_proposed']
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster/peers', SRR['empty_records']),
+ ('GET', 'cluster/peers', SRR['empty_records']),
+ ('POST', 'cluster/peers', SRR['empty_good']),
+ ('POST', 'cluster/peers', SRR['empty_good'])
+ ])
+ assert create_and_apply(my_module, args)
+
+
+def test_create_idempotency_rest():
+ ''' Test successful create idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster/peers', SRR['cluster_peer_src']),
+ ('GET', 'cluster/peers', SRR['cluster_peer_dst'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)
+
+
+def test_successful_create_without_passphrase_rest():
+ ''' Test successful create '''
+ args = dict(DEFAULT_ARGS) # copy, so DEFAULT_ARGS is not mutated for later tests
+ del args['passphrase']
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster/peers', SRR['empty_records']),
+ ('GET', 'cluster/peers', SRR['empty_records']),
+ ('POST', 'cluster/peers', SRR['passphrase_response']),
+ ('POST', 'cluster/peers', SRR['empty_good'])
+ ])
+ assert create_and_apply(my_module, args)
+
+
+def test_successful_delete_rest():
+ ''' Test successful delete '''
+ module_args = {'state': 'absent'}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster/peers', SRR['cluster_peer_src']),
+ ('GET', 'cluster/peers', SRR['cluster_peer_dst']),
+ ('DELETE', 'cluster/peers/1fg98aba-2aa6-11ec-b7be-005fgvb366e1', SRR['empty_good']),
+ ('DELETE', 'cluster/peers/1e698aba-2aa6-11ec-b7be-005056b366e1', SRR['empty_good'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)
+
+
+def test_delete_idempotency_rest():
+ ''' Test delete idempotency '''
+ module_args = {'state': 'absent'}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster/peers', SRR['empty_records']),
+ ('GET', 'cluster/peers', SRR['empty_records'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)
+
+
+def test_error_get_cluster_peer_rest():
+ ''' Test get error '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster/peers', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, DEFAULT_ARGS, fail=True)['msg']
+ assert 'calling: cluster/peers: got Expected error.' == error
+
+
+def test_error_delete_cluster_peer_rest():
+ ''' Test delete error '''
+ module_args = {'state': 'absent'}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster/peers', SRR['cluster_peer_src']),
+ ('GET', 'cluster/peers', SRR['cluster_peer_dst']),
+ ('DELETE', 'cluster/peers/1fg98aba-2aa6-11ec-b7be-005fgvb366e1', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'calling: cluster/peers/1fg98aba-2aa6-11ec-b7be-005fgvb366e1: got Expected error.' == error
+
+
+def test_error_create_cluster_peer_rest():
+ ''' Test create error '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster/peers', SRR['empty_records']),
+ ('GET', 'cluster/peers', SRR['empty_records']),
+ ('POST', 'cluster/peers', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, DEFAULT_ARGS, fail=True)['msg']
+ assert 'calling: cluster/peers: got Expected error.' == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_command.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_command.py
new file mode 100644
index 000000000..38bc6ec96
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_command.py
@@ -0,0 +1,246 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP Command Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_module, expect_and_capture_ansible_exception, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_command import NetAppONTAPCommand as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'use_rest',
+}
+
+
+def cli_output(priv, result, translate=True):
+ prefix = 'NetApp Release'
+ print('HERE', 'start')
+ if priv == 'advanced':
+ prefix = '\n' + prefix
+ if result == "u'77'":
+ result = u'77'
+ elif result == "b'77'":
+ print('HERE', b'77')
+ result = b'77'
+ elif result is None:
+ result = b'7'
+ return {
+ 'cli-output': prefix,
+ 'cli-result-value': result
+ }
+
+
+def build_zapi_response_raw(contents):
+ """ when testing special encodings, we cannot use build_zapi_response as translate_struct converts to text
+ """
+ if netapp_utils.has_netapp_lib():
+ xml = netapp_utils.zapi.NaElement('results')
+ xml.add_attr('status', 'status_ok')
+ xml.add_new_child('cli-output', contents['cli-output'])
+ xml.add_new_child('cli-result-value', contents['cli-result-value'])
+ # print('XML ut:', xml.to_string())
+ xml.add_attr('status', 'passed')
+ return (xml, 'valid')
+ return ('netapp-lib is required', 'invalid')
+
+
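+# The 77 / b'77' / u'77' variants exercise how cli-result-value is handled when the mocked
+# response carries a text, bytes, or unicode-literal value.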
+ZRR = zapi_responses({
+ 'cli_version': build_zapi_response_raw(cli_output(None, None)),
+ 'cli_version_advanced': build_zapi_response_raw(cli_output('advanced', None)),
+ 'cli_version_77': build_zapi_response(cli_output(None, '77')),
+ 'cli_version_b77': build_zapi_response_raw(cli_output(None, "b'77'")),
+ 'cli_version_u77': build_zapi_response_raw(cli_output(None, "u'77'")),
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ }
+ error = 'missing required arguments: command'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_default_priv():
+ ''' make sure privilege is not required '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+ ])
+ module_args = {
+ 'command': 'version',
+ }
+ msg = call_main(my_main, DEFAULT_ARGS, module_args)['msg']
+ needle = b'<cli-output>NetApp Release'
+ assert needle in msg
+ print('Version (raw): %s' % msg)
+
+
+def test_admin_priv():
+ ''' make sure admin is accepted '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+ ])
+ module_args = {
+ 'command': 'version',
+ 'privilege': 'admin',
+ }
+ msg = call_main(my_main, DEFAULT_ARGS, module_args)['msg']
+ needle = b'<cli-output>NetApp Release'
+ assert needle in msg
+ print('Version (raw): %s' % msg)
+
+
+def test_advanced_priv():
+ ''' make sure advanced is accepted '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['cli_version_advanced']),
+ ])
+ module_args = {
+ 'command': 'version',
+ 'privilege': 'advanced',
+ }
+ msg = call_main(my_main, DEFAULT_ARGS, module_args)['msg']
+ # Interestingly, the ZAPI returns a slightly different response
+ needle = b'<cli-output>\nNetApp Release'
+ assert needle in msg
+ print('Version (raw): %s' % msg)
+
+
+def get_dict_output(extra_args=None):
+ ''' get result value after calling command module '''
+ module_args = {
+ 'command': 'version',
+ 'return_dict': 'true',
+ }
+ if extra_args:
+ module_args.update(extra_args)
+ dict_output = call_main(my_main, DEFAULT_ARGS, module_args)['msg']
+ print('dict_output: %s' % repr(dict_output))
+ return dict_output
+
+
+def test_dict_output_77():
+ ''' make sure correct value is returned '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['cli_version_77']),
+ ])
+ result = '77'
+ assert get_dict_output()['result_value'] == int(result)
+
+
+def test_dict_output_b77():
+ ''' make sure correct value is returned '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['cli_version_b77']),
+ ])
+ result = b'77'
+ assert get_dict_output()['result_value'] == int(result)
+
+
+def test_dict_output_u77():
+ ''' make sure correct value is returned '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['cli_version_u77']),
+ ])
+ result = u'77'
+ assert get_dict_output()['result_value'] == int(result)
+
+
+def test_dict_output_exclude():
+ ''' make sure correct value is returned '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+ ])
+ dict_output = get_dict_output({'exclude_lines': 'NetApp Release'})
+ assert len(dict_output['stdout_lines']) == 1
+ assert len(dict_output['stdout_lines_filter']) == 0
+ dict_output = get_dict_output({'exclude_lines': 'whatever'})
+ assert len(dict_output['stdout_lines']) == 1
+ assert len(dict_output['stdout_lines_filter']) == 1
+
+
+def test_dict_output_include():
+ ''' make sure correct value is returned '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+ ])
+ dict_output = get_dict_output({'include_lines': 'NetApp Release'})
+ assert len(dict_output['stdout_lines']) == 1
+ assert len(dict_output['stdout_lines_filter']) == 1
+ dict_output = get_dict_output({'include_lines': 'whatever'})
+ assert len(dict_output['stdout_lines']) == 1
+ assert len(dict_output['stdout_lines_filter']) == 0
+
+
+def test_check_mode():
+ ''' make sure nothing is done '''
+ register_responses([
+ ])
+ module_args = {
+ 'command': 'version',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.module.check_mode = True
+ msg = expect_and_capture_ansible_exception(my_obj.apply, 'exit')['msg']
+ needle = "Would run command: '['version']'"
+ assert needle in msg
+ print('Version (raw): %s' % msg)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_missing_netapp_lib(mock_has_netapp_lib):
+ module_args = {
+ 'command': 'version',
+ }
+ mock_has_netapp_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_zapi_errors():
+ ''' validate ZAPI error handling '''
+ register_responses([
+ ('ZAPI', 'system-cli', ZRR['error']),
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+ ('ZAPI', 'system-cli', ZRR['cli_version']),
+
+ ])
+ module_args = {
+ 'command': 'version',
+ }
+ error = zapi_error_message("Error running command ['version']")
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ # EMS error is ignored
+ assert b'NetApp Release' in call_main(my_main, DEFAULT_ARGS, module_args, fail=False)['msg']
+ # EMS cserver error is ignored
+ assert b'NetApp Release' in call_main(my_main, DEFAULT_ARGS, module_args, fail=False)['msg']
+ # EMS vserver error is ignored
+ module_args = {
+ 'command': 'version',
+ 'vserver': 'svm'
+ }
+ assert b'NetApp Release' in call_main(my_main, DEFAULT_ARGS, module_args, fail=False)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_debug.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_debug.py
new file mode 100644
index 000000000..55d6f2727
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_debug.py
@@ -0,0 +1,344 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP debug Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import copy
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_debug \
+ import NetAppONTAPDebug as my_module # module under test
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ assert_no_warnings, assert_no_warnings_except_zapi, call_main, create_and_apply, create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, zapi_responses
+
+# skip tests when netapp-lib is missing (not available on 2.6 anymore)
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+DEFAULT_ARGS = {
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'vserver': 'vserver',
+}
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy_9_8_0')), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'one_vserver_record_with_intf': (200, {
+ "records": [{
+ 'name': 'vserver1',
+ 'ip_interfaces': [
+ dict(services=['management'])],
+ }],
+ 'num_records': 1
+ }, None),
+ 'one_user_record': (200, {
+ "records": [{
+ 'name': 'user1',
+ 'applications': [
+ dict(application='http'),
+ dict(application='ontapi'),
+ ],
+ 'locked': False,
+ 'owner': {'name': 'vserver'}
+ }],
+ 'num_records': 1
+ }, None),
+ 'one_user_record_admin': (200, {
+ "records": [{
+ 'name': 'user1',
+ 'applications': [
+ dict(application='http'),
+ dict(application='ontapi'),
+ ],
+ 'locked': False,
+ 'owner': {'name': 'vserver'},
+ 'role': {'name': 'admin'}
+ }],
+ 'num_records': 1
+ }, None),
+ 'ConnectTimeoutError': (400, None, "Connection timed out"),
+ 'Name or service not known': (400, None, "Name or service not known"),
+ 'not_authorized': (400, None, "not authorized for that command"),
+}, allow_override=False)
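+
+
+# Editorial note: each SRR value is a (status_code, json_body, error_message) triple, matching
+# what the mocked send_request returns; rest_responses() also adds common entries such as
+# SRR['generic_error'] and SRR['empty_records'] that the tests below rely on. Illustrative only:
+#
+# status_code, body, error = SRR['one_user_record']
+# assert status_code == 200 and error is None and body['num_records'] == 1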
+
+ZRR = zapi_responses({
+ 'ConnectTimeoutError': build_zapi_error('123', 'ConnectTimeoutError'),
+ 'Name or service not known': build_zapi_error('123', 'Name or service not known'),
+}, allow_override=False)
+
+
+if netapp_utils.has_netapp_lib():
+ REST_ZAPI_FLOW = [
+ ('system-get-version', ZRR['version']), # get version
+ ('GET', 'cluster', SRR['is_rest_9_8']), # get_version
+ ]
+else:
+ REST_ZAPI_FLOW = [
+ ('GET', 'cluster', SRR['is_rest_9_8']), # get_version
+ ]
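+
+
+# Editorial note: na_ontap_debug probes both connection types, so every test flow starts with a
+# ZAPI system-get-version call (when netapp-lib is available) followed by a REST GET on 'cluster',
+# and each test appends its scenario-specific calls to this common prefix, for example:
+#
+# register_responses(REST_ZAPI_FLOW + [
+#     ('GET', 'security/accounts', SRR['one_user_record_admin']),   # get user
+# ])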
+
+
+def test_success_no_vserver():
+ ''' test get'''
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record_admin']) # get user
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('vserver')
+ results = create_and_apply(my_module, args)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' in results
+ assert 'msg' in results
+ assert "NOTE: application console not found for user: user1: ['http', 'ontapi']" in results['notes']
+ assert 'ZAPI connected successfully.' in results['msg']
+
+
+def test_success_with_vserver():
+ ''' test get'''
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record']), # get user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_svms
+ ('GET', 'security/accounts', SRR['one_user_record']) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS)
+ print('Info: %s' % results)
+ # assert results['changed'] is False
+ assert_no_warnings_except_zapi()
+ assert 'notes' not in results
+
+
+def test_fail_with_vserver_locked():
+ ''' test get'''
+ user = copy.deepcopy(SRR['one_user_record'])
+ user[1]['records'][0]['locked'] = True
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record']), # get user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_svms
+ ('GET', 'security/accounts', user) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' in results
+ assert 'user: user1 is locked on vserver: vserver' in results['notes'][0]
+
+
+def test_fail_with_vserver_missing_app():
+ ''' test get'''
+ user = copy.deepcopy(SRR['one_user_record'])
+ user[1]['records'][0]['applications'] = [dict(application='http')]
+
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record']), # get user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_svms
+ ('GET', 'security/accounts', user) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' in results
+ assert 'application ontapi not found for user: user1' in results['notes'][0]
+ assert 'Error: no unlocked user for ontapi on vserver: vserver' in results['msg']
+
+
+def test_fail_with_vserver_list_user_not_found():
+ ''' test get'''
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record']), # get user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_svms
+ ('GET', 'security/accounts', SRR['empty_records']) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'Error getting accounts for: vserver: none found' in results['msg']
+
+
+def test_fail_with_vserver_list_user_error_on_get_users():
+ ''' test get'''
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record']), # get user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_svms
+ ('GET', 'security/accounts', SRR['generic_error']) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'Error getting accounts for: vserver: calling: security/accounts: got Expected error.' in results['msg']
+
+
+def test_success_with_vserver_list_user_not_authorized():
+ ''' test get'''
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record']), # get user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_svms
+ ('GET', 'security/accounts', SRR['not_authorized']) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'Not autorized to get accounts for: vserver: calling: security/accounts: got not authorized for that command.' in results['msg']
+
+
+def test_fail_with_vserver_no_interface():
+ ''' test get'''
+ vserver = copy.deepcopy(SRR['one_vserver_record_with_intf'])
+ vserver[1]['records'][0].pop('ip_interfaces')
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record_admin']), # get user
+ ('GET', 'svm/svms', vserver), # get_svms
+ ('GET', 'security/accounts', SRR['one_user_record']) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' in results
+ assert "NOTE: application console not found for user: user1: ['http', 'ontapi']" in results['notes']
+ assert 'Error vserver is not associated with a network interface: vserver' in results['msg']
+
+
+def test_fail_with_vserver_not_found():
+ ''' test get'''
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record_admin']), # get user
+ ('GET', 'svm/svms', SRR['empty_records']), # get_svms
+ ('GET', 'security/accounts', SRR['one_user_record']) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' in results
+ assert "NOTE: application console not found for user: user1: ['http', 'ontapi']" in results['notes']
+ assert 'Error getting vserver in list_interfaces: vserver: not found' in results['msg']
+
+
+def test_fail_with_vserver_error_on_get_svms():
+ ''' test get'''
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record_admin']), # get user
+ ('GET', 'svm/svms', SRR['generic_error']), # get_svms
+ ('GET', 'security/accounts', SRR['one_user_record']) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' in results
+ assert "NOTE: application console not found for user: user1: ['http', 'ontapi']" in results['notes']
+ assert 'Error getting vserver in list_interfaces: vserver: calling: svm/svms: got Expected error.' in results['msg']
+
+
+def test_note_with_vserver_no_management_service():
+ ''' test get'''
+ vserver = copy.deepcopy(SRR['one_vserver_record_with_intf'])
+ vserver[1]['records'][0]['ip_interfaces'][0]['services'] = ['data_core']
+ register_responses(REST_ZAPI_FLOW + [
+ ('GET', 'security/accounts', SRR['one_user_record_admin']), # get user
+ ('GET', 'svm/svms', vserver), # get_svms
+ ('GET', 'security/accounts', SRR['one_user_record']) # get_users
+ ])
+
+ results = create_and_apply(my_module, DEFAULT_ARGS)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' in results
+ assert 'no management policy in services' in results['notes'][2]
+
+
+def test_fail_zapi_error():
+ ''' test get'''
+ register_responses([
+ ('system-get-version', ZRR['error']),
+ ('GET', 'cluster', SRR['is_rest_9_8']), # get_version
+ ('GET', 'security/accounts', SRR['one_user_record']), # get_user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_vservers
+ ('GET', 'security/accounts', SRR['one_user_record']), # get_users
+ ('system-get-version', ZRR['ConnectTimeoutError']),
+ ('GET', 'cluster', SRR['is_rest_9_8']), # get_version
+ ('GET', 'security/accounts', SRR['one_user_record']), # get_user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_vservers
+ ('GET', 'security/accounts', SRR['one_user_record']), # get_users
+ ('system-get-version', ZRR['Name or service not known']),
+ ('GET', 'cluster', SRR['is_rest_9_8']), # get_version
+ ('GET', 'security/accounts', SRR['one_user_record']), # get_user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_vservers
+ ('GET', 'security/accounts', SRR['one_user_record']) # get_users
+ ])
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' not in results
+ assert 'Unclassified, see msg' in results['msg']
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ assert 'Error in hostname - Address does not exist or is not reachable: NetApp API failed. Reason - 123:ConnectTimeoutError' in results['msg']
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ assert 'Error in hostname - DNS name cannot be resolved: NetApp API failed. Reason - 123:Name or service not known' in results['msg']
+
+
+def test_fail_rest_error():
+ ''' test get'''
+ register_responses([
+ ('system-get-version', ZRR['version']),
+ ('GET', 'cluster', SRR['is_zapi']), # get_version
+ ('system-get-version', ZRR['version']),
+ ('GET', 'cluster', SRR['ConnectTimeoutError']), # get_version
+ ('system-get-version', ZRR['version']),
+ ('GET', 'cluster', SRR['Name or service not known']), # get_version
+ ])
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' not in results
+ assert 'Other error for hostname: 10.10.10.10 using REST: Unreachable.' in results['msg']
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ assert 'Error in hostname - Address does not exist or is not reachable: Connection timed out' in results['msg']
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ assert 'Error in hostname - DNS name cannot be resolved: Name or service not known' in results['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ ''' test get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8']), # get_version
+ ('GET', 'security/accounts', SRR['one_user_record']), # get_user
+ ('GET', 'svm/svms', SRR['one_vserver_record_with_intf']), # get_vservers
+ ('GET', 'security/accounts', SRR['one_user_record']) # get_users
+ ])
+
+ mock_has_netapp_lib.return_value = False
+
+ results = create_and_apply(my_module, DEFAULT_ARGS, fail=True)
+ print('Info: %s' % results)
+ assert_no_warnings_except_zapi()
+ assert 'notes' not in results
+ assert 'Install the python netapp-lib module or a missing dependency' in results['msg'][0]
+
+
+def test_check_connection_internal_error():
+ ''' expecting REST or ZAPI '''
+ error = 'Internal error, unexpected connection type: rest'
+ assert error == expect_and_capture_ansible_exception(create_module(my_module, DEFAULT_ARGS).check_connection, 'fail', 'rest')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_disk_options.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_disk_options.py
new file mode 100644
index 000000000..d729b4edd
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_disk_options.py
@@ -0,0 +1,151 @@
+# (c) 2021-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP disk options Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ call_main, create_and_apply, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_disk_options \
+ import NetAppOntapDiskOptions as my_module, main as my_main # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'node': 'node1',
+ 'bkg_firmware_update': False,
+ 'autocopy': False,
+ 'autoassign': False,
+ 'autoassign_policy': 'default',
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+}
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'one_disk_options_record': (200, {
+ "records": [{
+ 'node': 'node1',
+ 'bkg_firmware_update': False,
+ 'autocopy': False,
+ 'autoassign': False,
+ 'autoassign_policy': 'default'
+ }]
+ }, None),
+ 'one_disk_options_record_on_off': (200, {
+ "records": [{
+ 'node': 'node1',
+ 'bkg_firmware_update': 'on',
+ 'autocopy': 'off',
+ 'autoassign': 'on',
+ 'autoassign_policy': 'default'
+ }]
+ }, None),
+ 'one_disk_options_record_bad_value': (200, {
+ "records": [{
+ 'node': 'node1',
+ 'bkg_firmware_update': 'whatisthis',
+ 'autocopy': 'off',
+ 'autoassign': 'on',
+ 'autoassign_policy': 'default'
+ }]
+ }, None)
+
+}, False)
+
+
+def test_rest_modify_no_action():
+ ''' no modification needed when disk options already match '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/storage/disk/option', SRR['one_disk_options_record']),
+ ])
+ assert not create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_rest_modify_prepopulate():
+ ''' modify disk options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/storage/disk/option', SRR['one_disk_options_record']),
+ ('PATCH', 'private/cli/storage/disk/option', SRR['empty_good']),
+ ])
+ args = {'autoassign': True, 'autocopy': True, 'bkg_firmware_update': True}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_rest_modify_on_off():
+ ''' modify disk options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/storage/disk/option', SRR['one_disk_options_record_on_off']),
+ ('PATCH', 'private/cli/storage/disk/option', SRR['empty_good']),
+ ])
+ args = {'autoassign': True, 'autocopy': True, 'bkg_firmware_update': True}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_error_rest_get_not_on_off():
+ ''' modify disk options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/storage/disk/option', SRR['one_disk_options_record_bad_value']),
+ ])
+ args = {'autoassign': True, 'autocopy': True, 'bkg_firmware_update': True}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args, fail=True)['msg'] == 'Unexpected value for field bkg_firmware_update: whatisthis'
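+
+
+# Editorial note: the 'on'/'off' records above exercise the module's normalization of CLI-style
+# strings to booleans. The sketch below only mirrors the error message asserted in
+# test_error_rest_get_not_on_off(); it is illustrative and not the module's actual implementation.
+def _demo_to_bool(field, value):
+ ''' illustrative only: map 'on'/'off' strings to booleans, reject anything else '''
+ if value in (True, 'on'):
+ return True
+ if value in (False, 'off'):
+ return False
+ raise ValueError('Unexpected value for field %s: %s' % (field, value))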
+
+
+def test_error_rest_no_zapi_support():
+ ''' modify disk options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ])
+ args = {'use_rest': 'auto'}
+ assert "na_ontap_disk_options only supports REST, and requires ONTAP 9.6 or later." in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_error_get():
+ ''' get disk options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/storage/disk/option', SRR['generic_error']),
+ ])
+ args = {'use_rest': 'auto'}
+ assert "calling: private/cli/storage/disk/option: got Expected error." in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_error_get_empty():
+ ''' get disk options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/storage/disk/option', SRR['empty_records']),
+ ])
+ args = {'use_rest': 'auto'}
+ assert "Error on GET private/cli/storage/disk/option, no record." == call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_error_patch():
+ ''' modify disk options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/storage/disk/option', SRR['one_disk_options_record_on_off']),
+ ('PATCH', 'private/cli/storage/disk/option', SRR['generic_error']),
+ ])
+ args = {'use_rest': 'auto'}
+ assert "calling: private/cli/storage/disk/option: got Expected error." in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_disks.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_disks.py
new file mode 100644
index 000000000..b59ae7c83
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_disks.py
@@ -0,0 +1,822 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP disks Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_disks \
+ import NetAppOntapDisks as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection():
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+
+ try:
+ container_type = self.xml_in['query']['storage-disk-info']['disk-raid-info']['container-type']
+ except LookupError:
+ container_type = None
+ try:
+ get_owned_disks = self.xml_in['query']['storage-disk-info']['disk-ownership-info']['home-node-name']
+ except LookupError:
+ get_owned_disks = None
+
+ api_call = self.xml_in.get_name()
+
+ if self.type == 'fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ elif api_call == 'storage-disk-get-iter':
+ if container_type == 'spare':
+ xml = self.home_spare_disks()
+ elif get_owned_disks:
+ xml = self.owned_disks()
+ else:
+ xml = self.partner_spare_disks()
+ elif api_call == 'cf-status':
+ xml = self.partner_node_name()
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def owned_disks():
+ ''' build xml data for disk-inventory-info owned disks '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': [
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.8'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.7'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.10'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.25'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.18'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.0'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.6'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.11'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.12'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.13'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.23'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.4'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.9'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.21'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.16'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.19'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.2'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.14'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.20'
+ }
+ }
+ }
+ ],
+ 'num-records': '19'
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def home_spare_disks():
+ ''' build xml data for disk-inventory-info home spare disks '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': [
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.9'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.20'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.9'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.22'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.13'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.23'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.16'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.18'
+ }
+ }
+ }
+ ],
+ 'num-records': '8'
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def partner_spare_disks():
+ ''' build xml data for disk-inventory-info partner spare disks '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': [
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.7'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.15'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.21'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.23'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.19'
+ }
+ }
+ },
+ {
+ 'storage-disk-info': {
+ 'disk-inventory-info': {
+ 'disk-cluster-name': '1.0.11'
+ }
+ }
+ }
+ ],
+ 'num-records': '6'
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def partner_node_name():
+ ''' build xml data for partner node name'''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'cf-status': {
+ 'partner-name': 'node2'
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def unassigned_disk_count():
+ ''' build xml data for the count of unassigned disks on a node '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'num-records': '0'
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+def default_args():
+ args = {
+ 'disk_count': 15,
+ 'node': 'node1',
+ 'disk_type': 'SAS',
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'owned_disk_record': (
+ 200, {
+ 'records': [
+ {
+ "name": "1.0.8",
+ "type": "sas",
+ "container_type": "aggregate",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.7",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.10",
+ "type": "sas",
+ "container_type": "aggregate",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.18",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.0",
+ "type": "sas",
+ "container_type": "aggregate",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.6",
+ "type": "sas",
+ "container_type": "aggregate",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.11",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.12",
+ "type": "sas",
+ "container_type": "aggregate",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.13",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.23",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.22",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.4",
+ "type": "sas",
+ "container_type": "aggregate",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.9",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.21",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.16",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.19",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.2",
+ "type": "sas",
+ "container_type": "aggregate",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.14",
+ "type": "sas",
+ "container_type": "aggregate",
+ "home_node": {
+ "name": "node1"
+ }
+ },
+ {
+ "name": "1.0.20",
+ "type": "sas",
+ "container_type": "spare",
+ "home_node": {
+ "name": "node1"
+ }
+ }
+ ],
+ 'num_records': 19},
+ None),
+
+ # 'owned_disk_record': (200, {'num_records': 15}),
+ 'unassigned_disk_record': (
+ 200, {
+ 'records': [],
+ 'num_records': 0},
+ None),
+ 'home_spare_disk_info_record': (
+ 200, {'records': [
+ {
+ 'name': '1.0.20',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node1'}},
+ {
+ 'name': '1.0.9',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node1'}},
+ {
+ 'name': '1.0.22',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node1'}},
+ {
+ 'name': '1.0.13',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node1'}},
+ {
+ 'name': '1.0.17',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node1'}},
+ {
+ 'name': '1.0.23',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node1'}},
+ {
+ 'name': '1.0.16',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node1'}},
+ {
+ 'name': '1.0.18',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node1'}}
+ ],
+ 'num_records': 8,
+ '_links': {'self': {'href': '/api/storage/disks?home_node.name=node1&container_type=spare&type=SAS&fields=name'}}},
+ None),
+
+ 'partner_node_name_record': (
+ 200, {'records': [
+ {
+ 'uuid': 'c345c182-a6a0-11eb-af7b-00a0984839de',
+ 'name': 'node2',
+ 'ha': {
+ 'partners': [
+ {'name': 'node1'}
+ ]
+ }
+ }
+ ],
+ 'num_records': 1},
+ None),
+
+ 'partner_spare_disk_info_record': (
+ 200, {'records': [
+ {
+ 'name': '1.0.7',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ },
+ {
+ 'name': '1.0.15',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ },
+ {
+ 'name': '1.0.21',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ },
+ {
+ 'name': '1.0.23',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ },
+ {
+ 'name': '1.0.19',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ },
+ {
+ 'name': '1.0.11',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ }
+ ],
+ 'num_records': 6},
+ None)
+}
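+
+
+# Editorial note: unlike the newer tests in this patch, this file builds SRR as a plain dict and
+# feeds the (status, body, error) triples straight to the patched send_request via side_effect;
+# the 'end_of_sequence' sentinel (HTTP 500) flags any unexpected extra call. Illustrative only:
+#
+# mock_request.side_effect = [SRR['is_rest'], SRR['owned_disk_record'], SRR['end_of_sequence']]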
+
+
+def test_successful_assign(patch_ansible):
+ ''' successful assign and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['disk_count'] = '20'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection()
+ my_obj.ems_log_event = Mock(return_value=None)
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ # mock_create.assert_called_with()
+ args['use_rest'] = 'never'
+ args['disk_count'] = '19'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection()
+ my_obj.ems_log_event = Mock(return_value=None)
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+def test_successful_unassign(patch_ansible):
+ ''' successful unassign and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['disk_count'] = '17'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection()
+ my_obj.ems_log_event = Mock(return_value=None)
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ # mock_create.assert_called_with()
+ args['use_rest'] = 'never'
+ args['disk_count'] = '19'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection()
+ my_obj.ems_log_event = Mock(return_value=None)
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_ensure_get_called(patch_ansible):
+ ''' test get_disks '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ print('starting')
+ my_obj = my_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = MockONTAPConnection()
+ assert my_obj.get_disks is not None
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' required arguments are reported as errors '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+def test_if_all_methods_catch_exception(patch_ansible):
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.get_disks(container_type='owned', node='node1')
+ assert 'Error getting disk ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.get_disks(container_type='unassigned')
+ assert 'Error getting disk ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.get_disks(container_type='spare', node='node1')
+ assert 'Error getting disk ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.get_partner_node_name()
+ assert 'Error getting partner name ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.disk_assign(needed_disks=2)
+ assert 'Error assigning disks ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.disk_unassign(['1.0.0', '1.0.1'])
+ assert 'Error unassigning disks ' in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_assign(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Steal disks from partner node and assign them to the requested node '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['owned_disk_record'],
+ SRR['unassigned_disk_record'],
+ SRR['home_spare_disk_info_record'],
+ SRR['partner_node_name_record'],
+ SRR['partner_spare_disk_info_record'],
+ SRR['empty_good'], # unassign
+ SRR['empty_good'], # assign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 8
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_unassign(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Unassign excess disks from the requested node '''
+ args = dict(default_args())
+ args['disk_count'] = 17
+ print(args)
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['owned_disk_record'],
+ SRR['unassigned_disk_record'],
+ SRR['home_spare_disk_info_record'],
+ SRR['partner_node_name_record'],
+ SRR['partner_spare_disk_info_record'],
+ SRR['empty_good'], # unassign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 6
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' disk_count matches arguments, do nothing '''
+ args = dict(default_args())
+ args['disk_count'] = 19
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['owned_disk_record'],
+ SRR['unassigned_disk_record'],
+ SRR['home_spare_disk_info_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 4
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_dns.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_dns.py
new file mode 100644
index 000000000..c592f5c88
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_dns.py
@@ -0,0 +1,388 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_dns'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import call_main, create_module, expect_and_capture_ansible_exception,\
+ patch_ansible, assert_warning_was_raised, print_warnings
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_dns import main as my_main, NetAppOntapDns as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'dns_record': (200, {"records": [{"domains": ['test.com'],
+ "servers": ['0.0.0.0'],
+ "svm": {"name": "svm1", "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"}}]}, None),
+ 'cluster_data': (200, {"dns_domains": ['test.com'],
+ "name_servers": ['0.0.0.0'],
+ "name": "cserver",
+ "uuid": "C2c9e252-41be-11e9-81d5-00a0986138f7"}, None),
+ 'cluster_name': (200, {"name": "cserver",
+ "uuid": "C2c9e252-41be-11e9-81d5-00a0986138f7"}, None),
+})
+
+dns_info = {
+ 'attributes': {
+ 'net-dns-info': {
+ 'name-servers': [{'ip-address': '0.0.0.0'}],
+ 'domains': [{'string': 'test.com'}],
+ 'skip-config-validation': 'true'
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'dns_info': build_zapi_response(dns_info),
+ 'error_15661': build_zapi_error(15661, 'not_found'),
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'nameservers': ['0.0.0.0'],
+ 'domains': ['test.com'],
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ error = 'Error: vserver is a required parameter with ZAPI.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_zapi_get_error():
+ register_responses([
+ ('ZAPI', 'net-dns-get', ZRR['error']),
+ ('ZAPI', 'net-dns-get', ZRR['error_15661']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'svm_abc',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ # get
+ error = zapi_error_message('Error getting DNS info')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_dns, 'fail')['msg']
+ assert my_obj.get_dns() is None
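+
+
+# Editorial note: ZAPI error 15661 (registered above as 'not_found') is treated by the module as
+# a missing DNS configuration rather than a failure, which is why get_dns() returns None above
+# instead of raising. Illustrative only:
+#
+# ('ZAPI', 'net-dns-get', ZRR['error_15661'])   # -> get_dns() returns None, no module failure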
+
+
+def test_idempotent_modify_dns():
+ register_responses([
+ ('ZAPI', 'net-dns-get', ZRR['dns_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'svm_abc',
+ }
+
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_zapi_modify_dns():
+ register_responses([
+ ('ZAPI', 'net-dns-get', ZRR['dns_info']),
+ ('ZAPI', 'net-dns-modify', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'net-dns-get', ZRR['dns_info']),
+ # error
+ ('ZAPI', 'net-dns-get', ZRR['dns_info']),
+ ('ZAPI', 'net-dns-modify', ZRR['error']),
+ ])
+ module_args = {
+ 'domains': ['new_test.com'],
+ 'nameservers': ['1.2.3.4'],
+ 'skip_validation': True,
+ 'use_rest': 'never',
+ 'vserver': 'svm_abc',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ 'domains': ['test.com'],
+ 'skip_validation': True,
+ 'use_rest': 'never',
+ 'vserver': 'svm_abc',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ 'domains': ['new_test.com'],
+ 'nameservers': ['1.2.3.4'],
+ 'skip_validation': True,
+ 'use_rest': 'never',
+ 'vserver': 'svm_abc',
+ }
+ error = zapi_error_message('Error modifying dns')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_zapi_create_dns():
+ register_responses([
+ ('ZAPI', 'net-dns-get', ZRR['empty']),
+ ('ZAPI', 'net-dns-create', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'net-dns-get', ZRR['dns_info']),
+ # error
+ ('ZAPI', 'net-dns-get', ZRR['empty']),
+ ('ZAPI', 'net-dns-create', ZRR['error']),
+ ])
+ module_args = {
+ 'domains': ['test.com'],
+ 'skip_validation': True,
+ 'use_rest': 'never',
+ 'vserver': 'svm_abc',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ error = zapi_error_message('Error creating dns')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_zapi_delete_dns():
+ register_responses([
+ ('ZAPI', 'net-dns-get', ZRR['dns_info']),
+ ('ZAPI', 'net-dns-destroy', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'net-dns-get', ZRR['empty']),
+ # error
+ ('ZAPI', 'net-dns-get', ZRR['dns_info']),
+ ('ZAPI', 'net-dns-destroy', ZRR['error']),
+ ])
+ module_args = {
+ 'domains': ['new_test.com'],
+ 'state': 'absent',
+ 'use_rest': 'never',
+ 'vserver': 'svm_abc',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ error = zapi_error_message('Error destroying dns')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_error():
+ module_args = {
+ 'use_rest': 'always',
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['generic_error']),
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ # create
+ ('PATCH', 'cluster', SRR['generic_error']),
+ ('PATCH', 'cluster', SRR['generic_error']),
+ ('POST', 'name-services/dns', SRR['generic_error']),
+ # delete
+ ('DELETE', 'name-services/dns/uuid', SRR['generic_error']),
+ # read
+ ('GET', 'name-services/dns', SRR['generic_error']),
+ # modify
+ ('PATCH', 'cluster', SRR['generic_error']),
+ ('PATCH', 'name-services/dns/uuid', SRR['generic_error']),
+ ])
+ error = rest_error_message('Error getting cluster info', 'cluster')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ # create
+ my_obj.is_cluster = True
+ error = rest_error_message('Error updating cluster DNS options', 'cluster')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_dns_rest, 'fail')['msg']
+ my_obj.is_cluster = False
+ # still cluster scope, as vserver is not set
+ assert error in expect_and_capture_ansible_exception(my_obj.create_dns_rest, 'fail')['msg']
+ my_obj.parameters['vserver'] = 'vserver'
+ error = rest_error_message('Error creating DNS service', 'name-services/dns')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_dns_rest, 'fail')['msg']
+ # delete
+ my_obj.is_cluster = True
+ error = 'Error: cluster scope when deleting DNS with REST requires ONTAP 9.9.1 or later.'
+ assert error in expect_and_capture_ansible_exception(my_obj.destroy_dns_rest, 'fail', {})['msg']
+ my_obj.is_cluster = False
+ error = rest_error_message('Error deleting DNS service', 'name-services/dns/uuid')
+ assert error in expect_and_capture_ansible_exception(my_obj.destroy_dns_rest, 'fail', {'uuid': 'uuid'})['msg']
+ # read, cluster scope
+ del my_obj.parameters['vserver']
+ error = rest_error_message('Error getting DNS service', 'name-services/dns')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_dns_rest, 'fail')['msg']
+ # modify
+ dns_attrs = {
+ 'domains': [],
+ 'nameservers': [],
+ 'uuid': 'uuid',
+ }
+ my_obj.is_cluster = True
+ error = rest_error_message('Error updating cluster DNS options', 'cluster')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_dns_rest, 'fail', dns_attrs)['msg']
+ my_obj.is_cluster = False
+ error = rest_error_message('Error modifying DNS configuration', 'name-services/dns/uuid')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_dns_rest, 'fail', dns_attrs)['msg']
+
+
+def test_rest_successfully_create():
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'svm_abc',
+ 'skip_validation': True
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/dns', SRR['zero_records']),
+ ('POST', 'name-services/dns', SRR['success']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_create_is_cluster_vserver():
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'cserver'
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'name-services/dns', SRR['zero_records']),
+ ('GET', 'cluster', SRR['cluster_name']),
+ ('PATCH', 'cluster', SRR['empty_good']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_idempotent_create_dns():
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'svm_abc',
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'name-services/dns', SRR['dns_record']),
+ ])
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_destroy():
+ module_args = {
+ 'state': 'absent',
+ 'use_rest': 'always',
+ 'vserver': 'svm_abc',
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'name-services/dns', SRR['dns_record']),
+ ('DELETE', 'name-services/dns/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['success']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_idempotently_destroy():
+ module_args = {
+ 'state': 'absent',
+ 'use_rest': 'always',
+ 'vserver': 'svm_abc',
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'name-services/dns', SRR['zero_records']),
+ ('GET', 'cluster', SRR['cluster_data']),
+ ])
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_modify():
+ module_args = {
+ 'domains': 'new_test.com',
+ 'state': 'present',
+ 'use_rest': 'always',
+ 'vserver': 'svm_abc'
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'name-services/dns', SRR['dns_record']),
+ ('PATCH', 'name-services/dns/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['success']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_modify_is_cluster_vserver():
+ module_args = {
+ 'domains': 'new_test.com',
+ 'state': 'present',
+ 'use_rest': 'always',
+ 'vserver': 'cserver'
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'name-services/dns', SRR['zero_records']),
+ ('GET', 'cluster', SRR['cluster_data']),
+ ('PATCH', 'cluster', SRR['empty_good']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_idempotently_modify():
+ module_args = {
+ 'state': 'present',
+ 'use_rest': 'always',
+ 'vserver': 'svm_abc',
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'name-services/dns', SRR['dns_record']),
+ ])
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_modify_is_cluster_skip_validation():
+ module_args = {
+ 'domains': 'new_test.com',
+ 'state': 'present',
+ 'use_rest': 'always',
+ 'skip_validation': True
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/dns', SRR['zero_records']),
+ ('PATCH', 'cluster', SRR['empty_good']),
+ # error if skip_validation is used on earlier ONTAP versions.
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_warning_was_raised("skip_validation is ignored for cluster DNS operations in REST.")
+ assert 'Error: Minimum version of ONTAP for skip_validation is (9, 9, 1)' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_has_netapp_lib(has_netapp_lib):
+ module_args = {
+ 'state': 'present',
+ 'use_rest': 'never',
+ 'vserver': 'svm_abc',
+ }
+ has_netapp_lib.return_value = False
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == 'Error: the python NetApp-Lib module is required. Import error: None'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_domain_tunnel.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_domain_tunnel.py
new file mode 100644
index 000000000..eb08bf205
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_domain_tunnel.py
@@ -0,0 +1,145 @@
+''' unit tests ONTAP Ansible module: na_ontap_domain_tunnel '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_domain_tunnel \
+ import NetAppOntapDomainTunnel as domain_tunnel_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Ooops, the UT needs one more SRR response"),
+ 'generic_error': (400, None, {'message': "expected error", 'code': '5'}),
+ # module specific responses
+ 'domain_tunnel_record': (200, {
+ 'svm': {
+ 'name': 'ansible'
+ }
+ }, None)
+}
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.mock_domain_tunnel = {
+ "hostname": '10.10.10.10',
+ "username": 'username',
+ "password": 'password',
+ "vserver": 'ansible'
+ }
+
+ def set_default_args(self):
+ return {
+ 'state': 'present',
+ 'hostname': self.mock_domain_tunnel['hostname'],
+ 'username': self.mock_domain_tunnel['username'],
+ 'password': self.mock_domain_tunnel['password'],
+ 'vserver': self.mock_domain_tunnel['vserver']
+ }
+
+ def get_domain_tunnel_mock_object(self):
+ domain_tunnel_obj = domain_tunnel_module()
+ return domain_tunnel_obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_domain_tunnel_mock_object().apply()
+ assert exc.value.args[0]['msg'] == SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_create_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_domain_tunnel_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_create_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['domain_tunnel_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_domain_tunnel_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_modify_rest(self, mock_request):
+ data = self.set_default_args()
+        data['vserver'] = 'ansible1'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['domain_tunnel_record'], # get
+ SRR['domain_tunnel_record'], # modify
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_domain_tunnel_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_delete_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['domain_tunnel_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_domain_tunnel_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_delete_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_domain_tunnel_mock_object().apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_efficiency_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_efficiency_policy.py
new file mode 100644
index 000000000..05270aef1
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_efficiency_policy.py
@@ -0,0 +1,422 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_efficiency_policy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_efficiency_policy \
+ import NetAppOntapEfficiencyPolicy as efficiency_module # module under test
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'state': 'present',
+ 'vserver': 'svm3',
+ 'policy_name': 'test_policy',
+ 'comment': 'This policy is for x and y',
+ 'enabled': True,
+ 'qos_policy': 'background',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+}
+
+
+threshold_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'sis-policy-info': {
+ 'changelog-threshold-percent': 10,
+ 'comment': 'This policy is for x and y',
+ 'enabled': 'true',
+ 'policy-name': 'test_policy',
+ 'policy-type': 'threshold',
+ 'qos-policy': 'background',
+ 'vserver': 'svm3'
+ }
+ }
+}
+
+schedule_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'sis-policy-info': {
+ 'comment': 'This policy is for x and y',
+ 'duration': 10,
+ 'enabled': 'true',
+ 'policy-name': 'test_policy',
+ 'policy-type': 'scheduled',
+ 'qos-policy': 'background',
+ 'vserver': 'svm3'
+ }
+ }
+}
+
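+# zapi_responses() registers these canned ZAPI replies and also provides shared keys ('empty', 'success', 'error') used by the tests below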
+ZRR = zapi_responses({
+ 'threshold_info': build_zapi_response(threshold_info),
+ 'schedule_info': build_zapi_response(schedule_info)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ efficiency_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_get_nonexistent_efficiency_policy():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['empty'])
+ ])
+ efficiency_obj = create_module(efficiency_module, DEFAULT_ARGS)
+ result = efficiency_obj.get_efficiency_policy()
+ assert not result
+
+
+def test_get_existing_efficiency_policy():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['threshold_info'])
+ ])
+ efficiency_obj = create_module(efficiency_module, DEFAULT_ARGS)
+ result = efficiency_obj.get_efficiency_policy()
+ assert result
+
+
+def test_successfully_create():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['empty']),
+ ('sis-policy-create', ZRR['success'])
+ ])
+ args = {'policy_type': 'threshold'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_create_idempotency():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['threshold_info'])
+ ])
+ args = {'policy_type': 'threshold'}
+    assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_threshold_duration_failure():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['threshold_info'])
+ ])
+ args = {'duration': 1}
+ msg = create_and_apply(efficiency_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert "duration cannot be set if policy_type is threshold" == msg
+
+
+def test_threshold_schedule_failure():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['threshold_info'])
+ ])
+ args = {'schedule': 'test_job_schedule'}
+ msg = create_and_apply(efficiency_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert "schedule cannot be set if policy_type is threshold" == msg
+
+
+def test_scheduled_threshold_percent_failure():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['schedule_info'])
+ ])
+ args = {'changelog_threshold_percent': 30}
+ msg = create_and_apply(efficiency_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert "changelog_threshold_percent cannot be set if policy_type is scheduled" == msg
+
+
+def test_successfully_delete():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['threshold_info']),
+ ('sis-policy-delete', ZRR['success'])
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_idempotency():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['empty'])
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_successful_modify():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['schedule_info']),
+ ('sis-policy-modify', ZRR['success'])
+ ])
+ args = {'policy_type': 'threshold'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('sis-policy-get-iter', ZRR['error']),
+ ('sis-policy-create', ZRR['error']),
+ ('sis-policy-modify', ZRR['error']),
+ ('sis-policy-delete', ZRR['error'])
+ ])
+ module_args = {
+ 'schedule': 'test_job_schedule'
+ }
+
+ my_obj = create_module(efficiency_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.get_efficiency_policy, 'fail')['msg']
+ assert 'Error searching for efficiency policy test_policy: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.create_efficiency_policy, 'fail')['msg']
+ assert 'Error creating efficiency policy test_policy: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.modify_efficiency_policy, 'fail', modify={'schedule': 'test_job_schedule'})['msg']
+ assert 'Error modifying efficiency policy test_policy: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_efficiency_policy, 'fail')['msg']
+ assert 'Error deleting efficiency policy test_policy: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+
+def test_switch_to_zapi():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('sis-policy-get-iter', ZRR['schedule_info'])
+ ])
+ args = {'use_rest': 'auto'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed'] is False
+
+
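+# module-specific REST records; rest_responses() also supplies shared keys such as 'is_rest_9_9_0', 'is_rest_96', 'empty_records', 'generic_error' and 'success'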
+SRR = rest_responses({
+ 'threshold_policy_info': (200, {"records": [
+ {
+ "uuid": "d0845ae1-a8a8-11ec-aa26-005056b323e5",
+ "svm": {"name": "svm3"},
+ "name": "test_policy",
+ "type": "threshold",
+ "start_threshold_percent": 30,
+ "qos_policy": "background",
+ "enabled": True,
+ "comment": "This policy is for x and y"
+ }
+ ], "num_records": 1}, None),
+ 'scheduled_policy_info': (200, {"records": [
+ {
+ "uuid": "0d1f0860-a8a9-11ec-aa26-005056b323e5",
+ "svm": {"name": "svm3"},
+ "name": "test_policy",
+ "type": "scheduled",
+ "duration": 5,
+ "schedule": {"name": "daily"},
+ "qos_policy": "background",
+ "enabled": True,
+ "comment": "This policy is for x and y"
+ }
+ ], "num_records": 1}, None),
+})
+
+
+def test_successful_create_rest():
+ ''' Test successful create '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['empty_records']),
+ ('POST', 'storage/volume-efficiency-policies', SRR['success'])
+ ])
+ args = {'policy_type': 'threshold', 'use_rest': 'always'}
+    assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_create_idempotency_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['threshold_policy_info'])
+ ])
+ args = {'policy_type': 'threshold', 'use_rest': 'always'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_threshold_duration_failure_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['threshold_policy_info'])
+ ])
+ args = {'duration': 1, 'use_rest': 'always'}
+ msg = create_and_apply(efficiency_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert "duration cannot be set if policy_type is threshold" == msg
+
+
+def test_threshold_schedule_failure_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['threshold_policy_info'])
+ ])
+ args = {'schedule': 'test_job_schedule', 'use_rest': 'always'}
+ msg = create_and_apply(efficiency_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert "schedule cannot be set if policy_type is threshold" == msg
+
+
+def test_scheduled_threshold_percent_failure_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['scheduled_policy_info'])
+ ])
+ args = {'changelog_threshold_percent': 30, 'use_rest': 'always'}
+ msg = create_and_apply(efficiency_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert "changelog_threshold_percent cannot be set if policy_type is scheduled" == msg
+
+
+def test_successfully_delete_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['scheduled_policy_info']),
+ ('DELETE', 'storage/volume-efficiency-policies/0d1f0860-a8a9-11ec-aa26-005056b323e5', SRR['success'])
+ ])
+ args = {'state': 'absent', 'use_rest': 'always'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_idempotency_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['empty_records'])
+ ])
+ args = {'state': 'absent', 'use_rest': 'always'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_successful_modify_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['scheduled_policy_info']),
+ ('PATCH', 'storage/volume-efficiency-policies/0d1f0860-a8a9-11ec-aa26-005056b323e5', SRR['success'])
+ ])
+ args = {'policy_type': 'threshold', 'use_rest': 'always'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_modify_duration_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['scheduled_policy_info']),
+ ('PATCH', 'storage/volume-efficiency-policies/0d1f0860-a8a9-11ec-aa26-005056b323e5', SRR['success'])
+ ])
+ args = {'duration': 10, 'use_rest': 'always'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_modify_duration_set_hyphen_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['scheduled_policy_info']),
+ ('PATCH', 'storage/volume-efficiency-policies/0d1f0860-a8a9-11ec-aa26-005056b323e5', SRR['success'])
+ ])
+ args = {'duration': "-", 'use_rest': 'always'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_modify_changelog_threshold_percent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['threshold_policy_info']),
+ ('PATCH', 'storage/volume-efficiency-policies/d0845ae1-a8a8-11ec-aa26-005056b323e5', SRR['success'])
+ ])
+ args = {'changelog_threshold_percent': 40, 'use_rest': 'always'}
+ assert create_and_apply(efficiency_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_if_all_methods_catch_exception_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volume-efficiency-policies', SRR['generic_error']),
+ ('POST', 'storage/volume-efficiency-policies', SRR['generic_error']),
+ ('PATCH', 'storage/volume-efficiency-policies', SRR['generic_error']),
+ ('DELETE', 'storage/volume-efficiency-policies', SRR['generic_error'])
+ ])
+ module_args = {
+ 'schedule': 'test_job_schedule',
+ 'use_rest': 'always'
+ }
+
+ my_obj = create_module(efficiency_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.get_efficiency_policy, 'fail')['msg']
+ assert 'calling: storage/volume-efficiency-policies: got Expected error.' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.create_efficiency_policy, 'fail')['msg']
+ assert 'calling: storage/volume-efficiency-policies: got Expected error.' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.modify_efficiency_policy, 'fail', modify={'schedule': 'test_job_schedule'})['msg']
+ assert 'calling: storage/volume-efficiency-policies: got Expected error.' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_efficiency_policy, 'fail')['msg']
+ assert 'calling: storage/volume-efficiency-policies: got Expected error.' in error
+
+
+def test_module_error_ontap_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ module_args = {'use_rest': 'always'}
+ msg = create_module(efficiency_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Error: REST requires ONTAP 9.8 or later for efficiency_policy APIs.' == msg
+
+
+def test_module_error_duration_in_threshold():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'threshold',
+ 'duration': 1
+ }
+ msg = create_module(efficiency_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'duration cannot be set if policy_type is threshold' == msg
+
+
+def test_module_error_schedule_in_threshold():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'threshold',
+ 'schedule': 'daily'
+ }
+ msg = create_module(efficiency_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'schedule cannot be set if policy_type is threshold' == msg
+
+
+def test_module_error_changelog_threshold_percent_in_schedule():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'scheduled',
+ 'changelog_threshold_percent': 20
+ }
+ msg = create_module(efficiency_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'changelog_threshold_percent cannot be set if policy_type is scheduled' == msg
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ems_destination.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ems_destination.py
new file mode 100644
index 000000000..ca951ba58
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ems_destination.py
@@ -0,0 +1,226 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_ems_destination '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception, call_main
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ems_destination \
+ import NetAppOntapEmsDestination as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
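+# canned GET bodies for support/ems/destinations; the 'missing_key' record deliberately omits the 'filters' field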
+SRR = rest_responses({
+ 'ems_destination': (200, {
+ "records": [
+ {
+ "name": "test",
+ "type": "rest-api",
+ "destination": "https://test.destination",
+ "filters": [
+ {
+ "name": "test-filter"
+ }
+ ]
+ }],
+ "num_records": 1
+ }, None),
+ 'missing_key': (200, {
+ "records": [
+ {
+ "name": "test",
+ "type": "rest_api",
+ "destination": "https://test.destination"
+ }],
+ "num_records": 1
+ }, None)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+}
+
+
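+# each test registers the ordered (verb, endpoint, canned response) tuples that the mocked REST layer will serve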
+def test_get_ems_destination_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['empty_records'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_ems_destination('test') is None
+
+
+def test_get_ems_destination_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['generic_error'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ my_module_object = create_module(my_module, DEFAULT_ARGS, module_args)
+ msg = 'Error fetching EMS destination for test: calling: support/ems/destinations: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_ems_destination, 'fail', 'test')['msg']
+
+
+def test_create_ems_destination():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['empty_records']),
+ ('POST', 'support/ems/destinations', SRR['empty_good'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_ems_destination_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('POST', 'support/ems/destinations', SRR['generic_error'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.create_ems_destination, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating EMS destinations for test: calling: support/ems/destinations: got Expected error.' == error
+
+
+def test_delete_ems_destination():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['ems_destination']),
+ ('DELETE', 'support/ems/destinations/test', SRR['empty_good'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter'], 'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_ems_destination_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('DELETE', 'support/ems/destinations/test', SRR['generic_error'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter'], 'state': 'absent'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.delete_ems_destination, 'fail', 'test')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting EMS destination for test: calling: support/ems/destinations/test: got Expected error.' == error
+
+
+def test_modify_ems_destination_filter():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['missing_key']),
+ ('PATCH', 'support/ems/destinations/test', SRR['empty_good'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['other-filter']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ems_destination_rest_api_idempotent():
+ """ verify that rest-api is equivalent to rest_api """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['ems_destination']),
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ems_destination_target():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['ems_destination']),
+ ('PATCH', 'support/ems/destinations/test', SRR['empty_good'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://different.destination', 'filters': ['test-filter']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ems_destination_type():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['ems_destination']),
+ ('DELETE', 'support/ems/destinations/test', SRR['empty_good']),
+ ('POST', 'support/ems/destinations', SRR['empty_good'])
+ ])
+ module_args = {'name': 'test', 'type': 'email', 'destination': 'test@hq.com', 'filters': ['test-filter']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ems_destination_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('PATCH', 'support/ems/destinations/test', SRR['generic_error'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['other-filter']}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ modify = {'filters': ['other-filter']}
+ error = expect_and_capture_ansible_exception(my_obj.modify_ems_destination, 'fail', 'test', modify)['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying EMS destination for test: calling: support/ems/destinations/test: got Expected error.' == error
+
+
+def test_module_fail_without_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ assert 'na_ontap_ems_destination is only supported with REST API' == error
+
+
+def test_apply_returns_errors_from_get_destination():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['generic_error'])
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ assert 'Error fetching EMS destination for test: calling: support/ems/destinations: got Expected error.' == error
+
+
+def test_check_mode_creates_no_destination():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['empty_records']),
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args, check_mode=True)['changed']
+
+
+def test_changed_set_to_ok_for_expected_values():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/destinations', SRR['ems_destination']),
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args, check_mode=True)['changed']
+
+
+def test_empty_modify_skips_patch():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {'name': 'test', 'type': 'rest_api', 'destination': 'https://test.destination', 'filters': ['test-filter']}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.modify_ems_destination('test', {})
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ems_filter.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ems_filter.py
new file mode 100644
index 000000000..f7f0a1feb
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ems_filter.py
@@ -0,0 +1,308 @@
+# (c) 2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_ems_filter '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception, call_main
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ems_filter \
+ import NetAppOntapEMSFilters as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
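+# canned GET bodies for support/ems/filters; both populated records end with a catch-all exclude rule ('*' severities, name_pattern and snmp_trap_types)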
+SRR = rest_responses({
+ 'ems_filter': (200, {
+ "name": "snmp-traphost",
+ "rules": [{
+ "index": "1",
+ "type": "include",
+ "message_criteria": {
+ "severities": "error,informational",
+ "name_pattern": "callhome.*",
+ }
+ }, {
+ "index": "2",
+ "type": "exclude",
+ "message_criteria": {
+ "severities": "*",
+ "name_pattern": "*",
+ "snmp_trap_types": "*",
+ }
+ }]
+ }, None),
+    'ems_filter_2_rules': (200, {
+ "name": "snmp-traphost",
+ "rules": [{
+ "index": "1",
+ "type": "include",
+ "message_criteria": {
+ "severities": "error,informational",
+ "name_pattern": "callhome.*",
+ }
+ }, {
+ "index": "2",
+ "type": "include",
+ "message_criteria": {
+ "severities": "alert",
+ "name_pattern": "callhome.*",
+ }
+ }, {
+ "index": "3",
+ "type": "exclude",
+ "message_criteria": {
+ "severities": "*",
+ "name_pattern": "*",
+ "snmp_trap_types": "*",
+ }
+ }]
+ }, None),
+ 'ems_filter_no_rules': (200, {
+ "name": "snmp-traphost",
+ }, None)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': "snmp-traphost"
+}
+
+DEFAULT_RULE = [{
+ "index": "1",
+ "type": "include",
+ "message_criteria": {
+ "severities": "error,informational",
+ "name_pattern": "callhome.*",
+ }
+}]
+
+
+DEFAULT_RULE_2_RULES = [{
+ "index": "1",
+ "type": "include",
+ "message_criteria": {
+ "severities": "error,informational",
+ "name_pattern": "callhome.*",
+ }}, {
+ "index": "2",
+ "type": "include",
+ "message_criteria": {
+ "severities": "alert",
+ "name_pattern": "callhome.*",
+ }}]
+
+DEFAULT_RULE_MODIFY_TYPE_2_RULES = [{
+ "index": "1",
+ "type": "include",
+ "message_criteria": {
+ "severities": "error,informational",
+ "name_pattern": "callhome.*",
+ }
+}, {
+ "index": "2",
+ "type": "exclude",
+ "message_criteria": {
+ "severities": "alert",
+ "name_pattern": "callhome.*",
+ }
+}]
+
+DEFAULT_RULE_MODIFY_SEVERITIES_2_RULES = [{
+ "index": "1",
+ "type": "include",
+ "message_criteria": {
+ "severities": "informational",
+ "name_pattern": "callhome.*",
+ }
+}, {
+ "index": "2",
+ "type": "include",
+ "message_criteria": {
+ "severities": "alert",
+ "name_pattern": "callhome.*",
+ }
+}]
+
+DEFAULT_RULE_MODIFY_NAME_PATTERN_2_RULES = [{
+ "index": "1",
+ "type": "include",
+ "message_criteria": {
+ "severities": "error,informational",
+ "name_pattern": "*",
+ }
+}, {
+ "index": "2",
+ "type": "include",
+ "message_criteria": {
+ "severities": "alert",
+ "name_pattern": "callhome.*",
+ }
+}]
+
+DEFAULT_RULE_STARS = [{
+ "index": "1",
+ "type": "include",
+ "message_criteria": {
+ "severities": "*",
+ "name_pattern": "*",
+ }
+}]
+
+
+def test_get_ems_filter_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/filters', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_ems_filter() is None
+
+
+def test_get_ems_filter_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/filters', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching ems filter snmp-traphost: calling: support/ems/filters: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_ems_filter, 'fail')['msg']
+
+
+def test_get_ems_filter_get():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/filters', SRR['ems_filter'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_ems_filter() is not None
+
+
+def test_create_ems_filter():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/filters', SRR['empty_records']),
+ ('POST', 'support/ems/filters', SRR['empty_good'])
+ ])
+ module_args = {'rules': DEFAULT_RULE}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_ems_filter_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('POST', 'support/ems/filters', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['rules'] = DEFAULT_RULE
+ error = expect_and_capture_ansible_exception(my_obj.create_ems_filter, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating EMS filter snmp-traphost: calling: support/ems/filters: got Expected error.' == error
+
+
+def test_delete_ems_filter():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/filters', SRR['ems_filter']),
+ ('DELETE', 'support/ems/filters/snmp-traphost', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_ems_filter_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('DELETE', 'support/ems/filters/snmp-traphost', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['rules'] = DEFAULT_RULE
+ error = expect_and_capture_ansible_exception(my_obj.delete_ems_filter, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting EMS filter snmp-traphost: calling: support/ems/filters/snmp-traphost: got Expected error.' == error
+
+
+def test_modify_ems_filter_add_rule():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/filters', SRR['ems_filter']),
+ ('PATCH', 'support/ems/filters/snmp-traphost', SRR['empty_good'])
+ ])
+ module_args = {'rules': DEFAULT_RULE_2_RULES}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ems_filter_change_type():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+        ('GET', 'support/ems/filters', SRR['ems_filter_2_rules']),
+ ('PATCH', 'support/ems/filters/snmp-traphost', SRR['empty_good'])
+ ])
+ module_args = {'rules': DEFAULT_RULE_MODIFY_TYPE_2_RULES}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ems_filter_change_severities():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+        ('GET', 'support/ems/filters', SRR['ems_filter_2_rules']),
+ ('PATCH', 'support/ems/filters/snmp-traphost', SRR['empty_good'])
+ ])
+ module_args = {'rules': DEFAULT_RULE_MODIFY_SEVERITIES_2_RULES}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ems_filter_change_name_pattern():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+        ('GET', 'support/ems/filters', SRR['ems_filter_2_rules']),
+ ('PATCH', 'support/ems/filters/snmp-traphost', SRR['empty_good'])
+ ])
+ module_args = {'rules': DEFAULT_RULE_MODIFY_NAME_PATTERN_2_RULES}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ems_filter_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('PATCH', 'support/ems/filters/snmp-traphost', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['rules'] = DEFAULT_RULE_2_RULES
+ error = expect_and_capture_ansible_exception(my_obj.modify_ems_filter, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying EMS filter snmp-traphost: calling: support/ems/filters/snmp-traphost: got Expected error.' == error
+
+
+def test_modify_ems_filter_no_rules():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/filters', SRR['ems_filter_no_rules']),
+ ])
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {})['changed']
+
+
+def test_modify_star_test():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'support/ems/filters', SRR['ems_filter']),
+ ('PATCH', 'support/ems/filters/snmp-traphost', SRR['empty_good'])
+ ])
+ module_args = {'rules': DEFAULT_RULE_STARS}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy.py
new file mode 100644
index 000000000..6d62fc497
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy.py
@@ -0,0 +1,277 @@
+# (c) 2019-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_export_policy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_export_policy \
+ import NetAppONTAPExportPolicy as policy_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_uuid_policy_id_export_policy': (
+ 200,
+ {
+ "records": [{
+ "svm": {
+ "uuid": "uuid",
+ "name": "svm"},
+ "id": 123,
+ "name": "ansible"
+ }],
+ "num_records": 1}, None),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'export_policy':
+ xml = self.build_export_policy_info(self.params)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_export_policy_info(export_policy_details):
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 1,
+ 'attributes-list': {'export-policy-info': {'name': export_policy_details['name']
+ }}}
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+    ''' Unit tests for na_ontap_export_policy '''
+
+ def setUp(self):
+ self.mock_export_policy = {
+ 'name': 'test_policy',
+ 'vserver': 'test_vserver'
+ }
+
+ def mock_args(self, rest=False):
+ if rest:
+ return {
+ 'vserver': self.mock_export_policy['vserver'],
+ 'name': self.mock_export_policy['name'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+ else:
+ return {
+ 'vserver': self.mock_export_policy['vserver'],
+ 'name': self.mock_export_policy['name'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+ }
+
+ def get_export_policy_mock_object(self, cx_type='zapi', kind=None):
+ policy_obj = policy_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ policy_obj.server = MockONTAPConnection()
+ elif kind == 'export_policy':
+ policy_obj.server = MockONTAPConnection(kind='export_policy', data=self.mock_export_policy)
+ return policy_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ policy_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_export_policy.NetAppONTAPExportPolicy.create_export_policy')
+ def test_successful_create(self, create_export_policy):
+ ''' Test successful create '''
+ data = self.mock_args()
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_export_policy_mock_object().apply()
+ assert exc.value.args[0]['changed']
+ create_export_policy.assert_called_with()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_export_policy.NetAppONTAPExportPolicy.get_export_policy')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_export_policy.NetAppONTAPExportPolicy.rename_export_policy')
+ def test_successful_rename(self, rename_export_policy, get_export_policy):
+ ''' Test successful rename '''
+ data = self.mock_args()
+ data['from_name'] = 'old_policy'
+ set_module_args(data)
+ get_export_policy.side_effect = [
+ None,
+ {'policy-name': 'old_policy'}
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_export_policy_mock_object().apply()
+ assert exc.value.args[0]['changed']
+ rename_export_policy.assert_called_with()
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_create(self, mock_request):
+ '''Test successful rest create'''
+ data = self.mock_args(rest=True)
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_record'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_export_policy_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_delete(self, mock_request):
+ '''Test successful rest delete'''
+ data = self.mock_args(rest=True)
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid_policy_id_export_policy'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_export_policy_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_fail_get_export_policy(self, mock_request):
+        '''Test error when fetching export policy via REST'''
+ data = self.mock_args(rest=True)
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_export_policy_mock_object(cx_type='rest').apply()
+ assert 'Error on fetching export policy: calling: protocols/nfs/export-policies/: got Expected error' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_ignore_from_name_when_state_absent(self, mock_request):
+ '''Test from_name is skipped for state absent'''
+ data = self.mock_args(rest=True)
+ data['from_name'] = 'ansible'
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid_policy_id_export_policy'], # this is record for name, from_name is skipped.
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_export_policy_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_rename(self, mock_request):
+ '''Test successful rest rename'''
+ data = self.mock_args(rest=True)
+ data['from_name'] = 'ansible'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_record'],
+ SRR['get_uuid_policy_id_export_policy'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_export_policy_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error_create(self, mock_request):
+ '''Test error rest create'''
+ data = self.mock_args(rest=True)
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_record'],
+ SRR['generic_error'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_export_policy_mock_object(cx_type='rest').apply()
+ assert 'Error on creating export policy: calling: protocols/nfs/export-policies: got Expected error.' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error_delete(self, mock_request):
+ '''Test error rest delete'''
+ data = self.mock_args(rest=True)
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid_policy_id_export_policy'],
+ SRR['generic_error'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_export_policy_mock_object(cx_type='rest').apply()
+ print(exc.value.args[0]['msg'])
+ assert 'Error on deleting export policy: calling: protocols/nfs/export-policies/123: got Expected error.' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error_rename(self, mock_request):
+ '''Test error rest rename'''
+ data = self.mock_args(rest=True)
+ data['from_name'] = 'ansible'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_record'],
+ SRR['get_uuid_policy_id_export_policy'],
+ SRR['generic_error'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_export_policy_mock_object(cx_type='rest').apply()
+ print(exc.value.args[0]['msg'])
+ assert 'Error on renaming export policy: calling: protocols/nfs/export-policies/123: got Expected error.' in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy_rule.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy_rule.py
new file mode 100644
index 000000000..66709fc0b
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy_rule.py
@@ -0,0 +1,404 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_export_policy_rule '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_module, expect_and_capture_ansible_exception, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_export_policy_rule import NetAppontapExportRule as my_module, main as my_main
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+policy = {
+ 'attributes-list': {
+ 'export-policy-info': {
+ 'policy-name': 'name',
+ 'policy-id': '345'
+ }}}
+
+policy_rule = {
+ 'attributes-list': {
+ 'export-rule-info': {
+ 'policy-name': 'policy_name',
+ 'client-match': 'client_match',
+ 'ro-rule': [{
+ 'security-flavor': 'any'
+ }],
+ 'rw-rule': [{
+ 'security-flavor': 'any'
+ }],
+ 'protocol': [{
+ 'access-protocol': 'protocol'
+ }],
+ 'super-user-security': {
+ 'security-flavor': 'any'
+ },
+ 'is-allow-set-uid-enabled': 'false',
+ 'rule-index': 123,
+ 'anonymous-user-id': 'anonymous_user_id',
+ 'is-allow-dev-is-enabled': 'false',
+ 'export-chown-mode': 'restricted'
+ }}}
+
+policy_rule_two_records = {
+ 'attributes-list': [
+ {'export-rule-info': {
+ 'policy-name': 'policy_name',
+ 'client-match': 'client_match1,client_match2',
+ 'ro-rule': [{
+ 'security-flavor': 'any'
+ }],
+ 'rw-rule': [{
+ 'security-flavor': 'any'
+ }],
+ 'protocol': [{
+ 'access-protocol': 'protocol'
+ }],
+ 'super-user-security': {
+ 'security-flavor': 'any'
+ },
+ 'is-allow-set-uid-enabled': 'false',
+ 'rule-index': 123,
+ 'anonymous-user-id': 'anonymous_user_id',
+ 'is-allow-dev-is-enabled': 'false',
+ 'export-chown-mode': 'restricted'
+ }},
+ {'export-rule-info': {
+ 'policy-name': 'policy_name',
+ 'client-match': 'client_match2,client_match1',
+ 'ro-rule': [{
+ 'security-flavor': 'any'
+ }],
+ 'rw-rule': [{
+ 'security-flavor': 'any'
+ }],
+ 'protocol': [{
+ 'access-protocol': 'protocol'
+ }],
+ 'super-user-security': {
+ 'security-flavor': 'any'
+ },
+ 'is-allow-set-uid-enabled': 'false',
+ 'rule-index': 123,
+ 'anonymous-user-id': 'anonymous_user_id',
+ 'is-allow-dev-is-enabled': 'false',
+ 'export-chown-mode': 'restricted'
+ }}]
+}
+
+
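+# build_zapi_response(record, count) wraps a dict as a canned ZAPI reply; the second argument appears to set the reported record count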
+ZRR = zapi_responses({
+ 'one_policy_record': build_zapi_response(policy, 1),
+ 'one_bad_policy_record': build_zapi_response({'error': 'no_policy_id'}, 1),
+ 'one_rule_record': build_zapi_response(policy_rule, 1),
+ 'two_rule_records': build_zapi_response(policy_rule_two_records, 2),
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'never',
+ 'policy_name': 'policy_name',
+ 'vserver': 'vserver',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('vserver')
+ error = 'missing required arguments:'
+ assert error in call_main(my_main, args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ error = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert error in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_get_nonexistent_rule():
+ ''' Test if get_export_policy_rule returns None for non-existent policy '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'rule_index': 3
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_export_policy_rule(3) is None
+
+
+def test_get_nonexistent_policy():
+ ''' Test if get_export_policy returns None for non-existent policy '''
+ register_responses([
+ ('ZAPI', 'export-policy-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'rule_index': 3
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.set_export_policy_id() is None
+
+
+def test_get_existing_rule():
+ ''' Test if get_export_policy_rule returns rule details for existing policy '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['one_rule_record']),
+ ])
+ module_args = {
+ 'rule_index': 3
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ result = my_obj.get_export_policy_rule(3)
+ assert result
+ assert result['name'] == 'policy_name'
+ assert result['client_match'] == ['client_match']
+ assert result['ro_rule'] == ['any']
+
+
+def test_get_existing_policy():
+ ''' Test if get_export_policy returns policy details for existing policy '''
+ register_responses([
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_policy_record']),
+ ])
+ module_args = {
+ 'rule_index': 3
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.set_export_policy_id()
+ assert my_obj.policy_id == '345'
+
+
+def test_create_missing_param_error():
+ ''' Test validation error from create '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['no_records']),
+ ('ZAPI', 'export-rule-get-iter', ZRR['no_records']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_policy_record']),
+ ])
+ module_args = {
+ 'client_match': 'client_match',
+ 'rw_rule': 'any',
+ 'rule_index': 3
+ }
+ msg = 'Error: Missing required option for creating export policy rule: ro_rule'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_successful_create_with_index():
+ ''' Test successful create '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['no_records']),
+ ('ZAPI', 'export-rule-get-iter', ZRR['no_records']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['no_records']),
+ ('ZAPI', 'export-policy-create', ZRR['success']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_policy_record']),
+ ('ZAPI', 'export-rule-create', ZRR['success']),
+ ])
+ module_args = {
+ 'client_match': 'client_match',
+ 'rw_rule': 'any',
+ 'ro_rule': 'any',
+ 'rule_index': 123
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_create_no_index():
+ ''' Test successful create '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['no_records']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_policy_record']),
+ ('ZAPI', 'export-rule-create', ZRR['success']),
+ ])
+ module_args = {
+ 'client_match': 'client_match',
+ 'rw_rule': 'any',
+ 'ro_rule': 'any'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_idempotency():
+ ''' Test create idempotency '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['one_rule_record']),
+ ])
+ module_args = {
+ 'client_match': 'client_match',
+ 'rw_rule': 'any',
+ 'ro_rule': 'any'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete():
+ ''' Test delete '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['one_rule_record']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_policy_record']),
+ ('ZAPI', 'export-rule-destroy', ZRR['success']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'rule_index': 3
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_idempotency():
+ ''' Test delete idempotency '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'rule_index': 3
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify():
+ ''' Test successful modify protocol '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['one_rule_record']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_policy_record']),
+ ('ZAPI', 'export-rule-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'protocol': ['cifs'],
+ 'allow_suid': True,
+ 'rule_index': 3,
+ 'allow_device_creation': True,
+ 'chown_mode': 'unrestricted'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_on_ambiguous_delete():
+ ''' Test error if multiple entries match for a delete '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['two_rule_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'client_match': 'client_match1,client_match2',
+ 'rw_rule': 'any',
+ 'ro_rule': 'any'
+ }
+ error = "Error multiple records exist for query:"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_helper_query_parameters():
+ ''' Test helper method set_query_parameters() '''
+ register_responses([
+ ])
+ module_args = {
+ 'client_match': 'client_match1,client_match2',
+ 'rw_rule': 'any',
+ 'ro_rule': 'any'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ result = my_obj.set_query_parameters(10)
+ print(result)
+ assert 'query' in result
+ assert 'export-rule-info' in result['query']
+ assert result['query']['export-rule-info']['rule-index'] == 10
+ result = my_obj.set_query_parameters(None)
+ print(result)
+ assert 'client-match' not in result['query']['export-rule-info']
+ assert result['query']['export-rule-info']['rw-rule'] == [{'security-flavor': 'any'}]
+
+
+def test_error_calling_zapis():
+ ''' Test error handling '''
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['error']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['error']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_bad_policy_record']),
+ ('ZAPI', 'export-rule-create', ZRR['error']),
+ ('ZAPI', 'export-policy-create', ZRR['error']),
+ ('ZAPI', 'export-rule-destroy', ZRR['error']),
+ ('ZAPI', 'export-rule-modify', ZRR['error']),
+ ('ZAPI', 'export-rule-set-index', ZRR['error']),
+ ])
+ module_args = {
+ 'client_match': 'client_match1,client_match2',
+ 'rw_rule': 'any',
+ 'ro_rule': 'any',
+ 'from_rule_index': 123,
+ 'rule_index': 124,
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
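+ # the registered ZAPI error responses above are consumed in registration order by the calls below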
+ error = zapi_error_message('Error getting export policy rule policy_name')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_export_policy_rule, 'fail', None)['msg']
+ error = zapi_error_message('Error getting export policy policy_name')
+ assert error in expect_and_capture_ansible_exception(my_obj.set_export_policy_id, 'fail')['msg']
+ error = 'Error getting export policy id for policy_name: got'
+ assert error in expect_and_capture_ansible_exception(my_obj.set_export_policy_id, 'fail')['msg']
+ error = zapi_error_message('Error creating export policy rule policy_name')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_export_policy_rule, 'fail')['msg']
+ error = zapi_error_message('Error creating export policy policy_name')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_export_policy, 'fail')['msg']
+ error = zapi_error_message('Error deleting export policy rule policy_name')
+ assert error in expect_and_capture_ansible_exception(my_obj.delete_export_policy_rule, 'fail', 123)['msg']
+ error = zapi_error_message('Error modifying export policy rule index 123')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_export_policy_rule, 'fail', {'rw_rule': ['any']}, 123)['msg']
+ error = zapi_error_message('Error reindexing export policy rule index 123')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_export_policy_rule, 'fail', {'rule_index': 123}, 123, True)['msg']
+
+
+def test_index_existing_entry():
+ """ validate entry can be found without index, and add index """
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['no_records']),
+ ('ZAPI', 'export-rule-get-iter', ZRR['one_rule_record']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_policy_record']),
+ ('ZAPI', 'export-rule-set-index', ZRR['success']),
+ ])
+ module_args = {
+ 'client_match': 'client_match',
+ 'rw_rule': 'any',
+ 'ro_rule': 'any',
+ 'rule_index': 124,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_no_index():
+ """ validate entry can be found without index, and deleted """
+ register_responses([
+ ('ZAPI', 'export-rule-get-iter', ZRR['two_rule_records']),
+ ('ZAPI', 'export-policy-get-iter', ZRR['one_policy_record']),
+ ('ZAPI', 'export-rule-destroy', ZRR['success']),
+ ])
+ module_args = {
+ 'client_match': 'client_match2,client_match1',
+ 'rw_rule': 'any',
+ 'ro_rule': 'any',
+ 'state': 'absent',
+ 'force_delete_on_first_match': True,
+ 'allow_suid': False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy_rule_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy_rule_rest.py
new file mode 100644
index 000000000..b1fb870e5
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_export_policy_rule_rest.py
@@ -0,0 +1,387 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import copy
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ call_main, patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_export_policy_rule \
+ import NetAppontapExportRule as policy_rule, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+# REST API canned responses when mocking send_request.
+# The rest_factory provides default responses shared across testcases.
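+# Only module specific payloads are declared here; generic entries used below,
+# such as 'is_rest_9_9_1', 'empty_records', 'generic_error' and 'success', come
+# from those shared defaults.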
+SRR = rest_responses({
+ 'get_uuid_policy_id_export_policy': (200, {"records": [
+ {
+ "svm": {"uuid": "uuid", "name": "svm"},
+ "id": 123,
+ "name": "ansible"
+ }], "num_records": 1}, None),
+ 'get_export_policy_rules': (200, {"records": [
+ {
+ "rw_rule": ["any"],
+ "_links": {"self": {"href": "/api/resourcelink"}},
+ "ro_rule": ["any"],
+ "allow_suid": True,
+ "chown_mode": "restricted",
+ "index": 10,
+ "superuser": ["any"],
+ "protocols": ["any"],
+ "anonymous_user": "1234",
+ "clients": [{"match": "10.10.0.0/16"}, {"match": "10.0.0.0/16"}, {"match": "10.20.0.0/16"}],
+ "ntfs_unix_security": "fail",
+ "allow_device_creation": True
+ }], "num_records": 1}, None),
+ 'get_export_policy_two_rules': (200, {"records": [
+ {
+ "rw_rule": ["any"],
+ "_links": {"self": {"href": "/api/resourcelink"}},
+ "ro_rule": ["any"],
+ "allow_suid": True,
+ "chown_mode": "restricted",
+ "index": 10,
+ "superuser": ["any"],
+ "protocols": ["any"],
+ "anonymous_user": "1234",
+ "clients": [{"match": "0.0.0.0/0"}],
+ "ntfs_unix_security": "fail",
+ "allow_device_creation": True
+ },
+ {
+ "rw_rule": ["any"],
+ "ro_rule": ["any"],
+ "allow_suid": True,
+ "chown_mode": "restricted",
+ "index": 11,
+ "superuser": ["any"],
+ "protocols": ["any"],
+ "anonymous_user": "1234",
+ "clients": [{"match": "0.0.0.0/0"}],
+ "ntfs_unix_security": "fail",
+ "allow_device_creation": True
+ }], "num_records": 2}, None),
+ 'create_export_policy_rules': (200, {"records": [
+ {
+ "rw_rule": ["any"],
+ "_links": {"self": {"href": "/api/resourcelink"}},
+ "ro_rule": ["any"],
+ "allow_suid": True,
+ "chown_mode": "restricted",
+ "index": 1,
+ "superuser": ["any"],
+ "protocols": ["any"],
+ "anonymous_user": "1234",
+ "clients": [{"match": "0.0.0.0/0"}],
+ "ntfs_unix_security": "fail",
+ "allow_device_creation": True
+ }], "num_records": 1}, None),
+ 'error_does_not_exist': (400, None, {'message': "entry doesn't exist"})
+})
+
+
+DEFAULT_ARGS = {
+ 'name': 'test',
+ 'client_match': ['1.1.1.0', '0.0.0.0/0'],
+ 'vserver': 'test',
+ 'protocol': 'nfs',
+ 'anonymous_user_id': '65534',
+ 'super_user_security': ['any'],
+ 'ntfs_unix_security': 'fail',
+ 'ro_rule': 'any',
+ 'rw_rule': 'any',
+ 'allow_device_creation': True,
+ 'allow_suid': True,
+ 'chown_mode': 'restricted',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+}
+
+
+def test_rest_successful_create_rule():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', SRR['empty_records']),
+ ('GET', 'protocols/nfs/export-policies/123/rules', SRR['empty_records']),
+ ('POST', 'protocols/nfs/export-policies/123/rules?return_records=true', SRR['create_export_policy_rules']),
+ ('PATCH', 'protocols/nfs/export-policies/123/rules/1', SRR['empty_records'])
+ ])
+ assert create_and_apply(policy_rule, DEFAULT_ARGS, {'rule_index': 10})['changed']
+
+
+def test_rest_error_get_policy():
+ '''Test error rest get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['generic_error'])
+ ])
+ my_module_object = create_module(policy_rule, DEFAULT_ARGS)
+ msg = 'Error on fetching export policy: calling: protocols/nfs/export-policies: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_export_policy_rule_rest, 'fail', 1)['msg']
+
+
+def test_rest_error_get_rule():
+ '''Test error rest get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', SRR['generic_error']),
+ # 2nd try - this time without index
+ ('GET', 'protocols/nfs/export-policies/123/rules', SRR['generic_error']),
+ # 3rd try
+ ('GET', 'protocols/nfs/export-policies/123/rules', SRR['error_does_not_exist']),
+ # 4th try
+ ('GET', 'protocols/nfs/export-policies/123/rules', SRR['get_export_policy_two_rules']),
+ ])
+ module_args = {
+ 'anonymous_user_id': '1234',
+ 'protocol': 'any',
+ 'super_user_security': 'any',
+ 'client_match': ['0.0.0.0/0'],
+ 'ntfs_unix_security': 'fail',
+ 'ro_rule': ['any'],
+ 'rw_rule': ['any'],
+ 'rule_index': 10
+ }
+ my_module_object = create_module(policy_rule, DEFAULT_ARGS, module_args)
+ msg = rest_error_message('Error on fetching export policy rule', 'protocols/nfs/export-policies/123/rules/10')
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_export_policy_rule, 'fail', 10)['msg']
+ # error with no index
+ msg = rest_error_message('Error on fetching export policy rules', 'protocols/nfs/export-policies/123/rules')
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_export_policy_rule, 'fail', None)['msg']
+ # does not exist error is ignored
+ assert my_module_object.get_export_policy_rule(None) is None
+ # multiple entries error
+ msg = 'Error multiple records exist for query:'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_export_policy_rule, 'fail', None)['msg']
+
+
+def test_rest_error_create_rule():
+ '''Test error rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', SRR['empty_records']),
+ ('GET', 'protocols/nfs/export-policies/123/rules', SRR['empty_records']),
+ ('POST', 'protocols/nfs/export-policies/123/rules?return_records=true', SRR['generic_error']),
+ # 2nd call
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', SRR['empty_records']),
+ ('GET', 'protocols/nfs/export-policies/123/rules', SRR['empty_records']),
+ ('POST', 'protocols/nfs/export-policies/123/rules?return_records=true', SRR['empty_records'])
+ ])
+ my_module_object = create_module(policy_rule, DEFAULT_ARGS, {'rule_index': 10})
+ msg = rest_error_message('Error on creating export policy rule', 'protocols/nfs/export-policies/123/rules?return_records=true')
+ assert msg in expect_and_capture_ansible_exception(my_module_object.apply, 'fail')['msg']
+ msg = 'Error on creating export policy rule, returned response is invalid:'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.apply, 'fail')['msg']
+
+
+def test_rest_successful_delete_rule():
+ '''Test successful rest delete'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', copy.deepcopy(SRR['get_export_policy_rules'])),
+ ('DELETE', 'protocols/nfs/export-policies/123/rules/10', SRR['empty_good'])
+ ])
+ assert create_and_apply(policy_rule, DEFAULT_ARGS, {'rule_index': 10, 'state': 'absent'})['changed']
+
+
+def test_rest_error_delete():
+ '''Test error rest delete'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', copy.deepcopy(SRR['get_export_policy_rules'])),
+ ('DELETE', 'protocols/nfs/export-policies/123/rules/10', SRR['generic_error'])
+ ])
+ my_module_object = create_module(policy_rule, DEFAULT_ARGS, {'rule_index': 10, 'state': 'absent'})
+ msg = 'Error on deleting export policy Rule: calling: protocols/nfs/export-policies/123/rules/10: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.apply, 'fail')['msg']
+
+
+def test_rest_successful_create_policy_and_rule():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['empty_records']),
+ ('GET', 'protocols/nfs/export-policies', SRR['empty_records']),
+ ('GET', 'protocols/nfs/export-policies', SRR['empty_records']),
+ ('POST', 'protocols/nfs/export-policies', SRR['empty_good']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('POST', 'protocols/nfs/export-policies/123/rules?return_records=true', SRR['create_export_policy_rules']),
+ ('PATCH', 'protocols/nfs/export-policies/123/rules/1', SRR['empty_records'])
+ ])
+ assert create_and_apply(policy_rule, DEFAULT_ARGS, {'rule_index': 10})['changed']
+
+
+def test_rest_error_creating_policy():
+ '''Test error rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['empty_records']),
+ ('GET', 'protocols/nfs/export-policies', SRR['empty_records']),
+ ('POST', 'protocols/nfs/export-policies', SRR['generic_error']),
+ ])
+ my_module_object = create_module(policy_rule, DEFAULT_ARGS)
+ msg = 'Error on creating export policy: calling: protocols/nfs/export-policies: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.apply, 'fail')['msg']
+
+
+def test_rest_successful_modify():
+ '''Test successful rest modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', copy.deepcopy(SRR['get_export_policy_rules'])),
+ ('PATCH', 'protocols/nfs/export-policies/123/rules/10', SRR['empty_good'])
+ ])
+ module_args = {
+ 'anonymous_user_id': '1234',
+ 'protocol': 'nfs4',
+ 'super_user_security': 'krb5i',
+ 'client_match': ['1.1.1.3', '1.1.0.3'],
+ 'ntfs_unix_security': 'ignore',
+ 'ro_rule': ['never'],
+ 'rw_rule': ['never'],
+ 'rule_index': 10,
+ 'allow_device_creation': False,
+ 'allow_suid': False,
+ 'chown_mode': 'unrestricted'
+ }
+ assert create_and_apply(policy_rule, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_modify():
+ '''Test error rest modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', copy.deepcopy(SRR['get_export_policy_rules'])),
+ ('PATCH', 'protocols/nfs/export-policies/123/rules/10', SRR['generic_error'])
+ ])
+ module_args = {
+ 'anonymous_user_id': '1234',
+ 'protocol': 'nfs4',
+ 'super_user_security': 'krb5i',
+ 'rule_index': 10
+ }
+
+ my_module_object = create_module(policy_rule, DEFAULT_ARGS, module_args)
+ msg = 'Error on modifying export policy Rule: calling: protocols/nfs/export-policies/123/rules/10: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.apply, 'fail')['msg']
+
+
+def test_rest_successful_rename():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/2', SRR['empty_records']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/10', copy.deepcopy(SRR['get_export_policy_rules'])),
+ ('PATCH', 'protocols/nfs/export-policies/123/rules/10', SRR['empty_records'])
+ ])
+ module_args = {
+ 'anonymous_user_id': '1234',
+ 'protocol': 'nfs4',
+ 'super_user_security': 'krb5i',
+ 'client_match': ['1.1.1.3', '1.1.0.3'],
+ 'ntfs_unix_security': 'ignore',
+ 'ro_rule': ['never'],
+ 'rw_rule': ['never'],
+ 'rule_index': 2,
+ 'from_rule_index': 10,
+ 'allow_device_creation': False,
+ 'allow_suid': False,
+ 'chown_mode': 'unrestricted'
+ }
+ assert create_and_apply(policy_rule, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successful_rename_no_from_index():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/2', SRR['error_does_not_exist']),
+ ('GET', 'protocols/nfs/export-policies/123/rules', copy.deepcopy(SRR['get_export_policy_rules'])),
+ ('PATCH', 'protocols/nfs/export-policies/123/rules/10', SRR['empty_records'])
+ ])
+ module_args = {
+ 'anonymous_user_id': '1234',
+ 'protocol': 'any',
+ 'super_user_security': 'any',
+ 'client_match': ["10.10.0.0/16", "10.20.0.0/16", "10.0.0.0/16"],
+ 'ntfs_unix_security': 'fail',
+ 'ro_rule': ['any'],
+ 'rw_rule': ['any'],
+ 'rule_index': 2
+ }
+ assert create_and_apply(policy_rule, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_rename_with_from_index_not_found():
+ """ rename is requested but from rule is not found """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/3', SRR['error_does_not_exist']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/2', SRR['error_does_not_exist']),
+ ])
+ module_args = {
+ 'anonymous_user_id': '1234',
+ 'protocol': 'nfs4',
+ 'super_user_security': 'krb5i',
+ 'client_match': ['1.1.1.3', '1.1.0.3'],
+ 'ntfs_unix_security': 'ignore',
+ 'ro_rule': ['never'],
+ 'rw_rule': ['never'],
+ 'rule_index': 3,
+ 'from_rule_index': 2,
+ }
+ msg = 'Error reindexing: export policy rule 2 does not exist.'
+ assert msg in create_and_apply(policy_rule, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_delete_no_index_multiple():
+ """ delete is requested but 2 rules are found """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules', SRR['get_export_policy_two_rules']),
+ # 2nd run
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules', SRR['get_export_policy_two_rules']),
+ ('DELETE', 'protocols/nfs/export-policies/123/rules/10', SRR['success'])
+ ])
+ module_args = {
+ 'anonymous_user_id': '1234',
+ 'protocol': 'any',
+ 'super_user_security': 'any',
+ 'client_match': ['0.0.0.0/0'],
+ 'ntfs_unix_security': 'fail',
+ 'ro_rule': ['any'],
+ 'rw_rule': ['any'],
+ 'state': 'absent'
+ }
+ msg = 'Error multiple records exist for query:'
+ assert msg in create_and_apply(policy_rule, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['force_delete_on_first_match'] = True
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fcp_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fcp_rest.py
new file mode 100644
index 000000000..4bd7c35a8
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fcp_rest.py
@@ -0,0 +1,231 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import copy
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fcp \
+ import NetAppOntapFCP as fcp # module under test
+
+# REST API canned responses when mocking send_request
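+# Each entry is a (status_code, body, error) tuple, consumed in order by the
+# mocked send_request via mock_request.side_effect in the tests below.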
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'fcp_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "enabled": True,
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ 'fcp_record_disabled': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "enabled": False,
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.data = data
+ self.xml_in = None
+ self.xml_out = None
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.mock_rule = {}
+
+ def mock_args(self, rest=False):
+ if rest:
+ return {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'test_vserver',
+ }
+
+ def get_mock_object(self, kind=None):
+ """
+ Helper method to return an na_ontap_fcp object
+ :param kind: passes this param to MockONTAPConnection()
+ :return: na_ontap_fcp object
+ """
+ obj = fcp()
+ return obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error_get(self, mock_request):
+ '''Test error rest get'''
+ data = self.mock_args(rest=True)
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_mock_object().apply()
+ assert 'Error on fetching fcp: calling: protocols/san/fcp/services: got Expected error.' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_create(self, mock_request):
+ '''Test successful rest create'''
+ data = self.mock_args(rest=True)
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_record'],
+ SRR['empty_good']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error_create(self, mock_request):
+ '''Test error rest create'''
+ data = self.mock_args(rest=True)
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_record'],
+ SRR['generic_error'],
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_mock_object().apply()
+ assert 'Error on creating fcp: calling: protocols/san/fcp/services: got Expected error.' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_delete(self, mock_request):
+ '''Test successful rest delete'''
+ data = self.mock_args(rest=True)
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ # the module under test modifies record directly, and may cause other tests to fail
+ copy.deepcopy(SRR['fcp_record']),
+ SRR['empty_good'],
+ SRR['empty_good']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error_delete(self, mock_request):
+ '''Test error rest delete'''
+ data = self.mock_args(rest=True)
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ copy.deepcopy(SRR['fcp_record']),
+ SRR['empty_good'],
+ SRR['generic_error'],
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_mock_object().apply()
+ assert 'Error on deleting fcp policy: calling: ' + \
+ 'protocols/san/fcp/services/671aa46e-11ad-11ec-a267-005056b30cfa: got Expected error.' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_disable(self, mock_request):
+ '''Test successful rest disable'''
+ data = self.mock_args(rest=True)
+ data['status'] = 'down'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ copy.deepcopy(SRR['fcp_record']),
+ SRR['empty_good'],
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_enable(self, mock_request):
+ '''Test successful rest enable'''
+ data = self.mock_args(rest=True)
+ data['status'] = 'up'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ copy.deepcopy(SRR['fcp_record_disabled']),
+ SRR['empty_good'],
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error_enabled_change(self, mock_request):
+ '''Test error rest change'''
+ data = self.mock_args(rest=True)
+ data['status'] = 'down'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ copy.deepcopy(SRR['fcp_record']),
+ SRR['generic_error'],
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_mock_object().apply()
+ assert 'Error on modifying fcp: calling: ' + \
+ 'protocols/san/fcp/services/671aa46e-11ad-11ec-a267-005056b30cfa: ' + \
+ 'got Expected error.' in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdsd.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdsd.py
new file mode 100644
index 000000000..5076af5f9
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdsd.py
@@ -0,0 +1,136 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP na_ontap_fdsd Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fdsd \
+ import NetAppOntapFDSD as my_module # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def default_args():
+ args = {
+ 'name': 'test',
+ 'vserver': 'vserver1',
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'ntfs_record': (
+ 200, {
+ 'records': [{
+ 'vserver': 'vserver1',
+ 'ntfs_sd': 'sd1'}],
+ 'num_records': 1},
+ None),
+
+}
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' test missing arguments '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_remove(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' remove Security Descriptor '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['ntfs_record'],
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Idempotent test '''
+ args = dict(default_args())
+ args['name'] = 'sd1'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['ntfs_record'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Create security descriptor'''
+ args = dict(default_args())
+ args['name'] = 'new_sd'
+ print(args)
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'],
+ SRR['empty_good'], # create
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdsp.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdsp.py
new file mode 100644
index 000000000..d523e7062
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdsp.py
@@ -0,0 +1,134 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP na_ontap_fdsp Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fdsp \
+ import NetAppOntapFDSP as my_module # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def default_args():
+ args = {
+ 'name': 'test',
+ 'vserver': 'vserver1',
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'security_policy_record': (
+ 200, {
+ 'records': [{
+ 'vserver': 'vserver1',
+ 'policy_name': 'test'}],
+ 'num_records': 1},
+ None),
+
+}
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' test missing arguments '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Create security policies'''
+ args = dict(default_args())
+ args['name'] = 'new_security_policy'
+ print(args)
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'],
+ SRR['empty_good'], # create
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_remove(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' remove Security policies '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['security_policy_record'],
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Idempotent test '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['security_policy_record'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdss.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdss.py
new file mode 100644
index 000000000..22e06fc1f
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fdss.py
@@ -0,0 +1,102 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP na_ontap_fdss Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fdss \
+ import NetAppOntapFDSS as my_module # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def default_args():
+ args = {
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver1',
+ 'name': 'policy1'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'job_id_record': (
+ 200, {
+ 'job': {
+ 'uuid': '94b6e6a7-d426-11eb-ac81-00505690980f',
+ '_links': {'self': {'href': '/api/cluster/jobs/94b6e6a7-d426-11eb-ac81-00505690980f'}}},
+ 'cli_output': ' Use the "job show -id 2379" command to view the status of this operation.'}, None),
+ 'job_response_record': (
+ 200, {
+ "uuid": "f03ccbb6-d8bb-11eb-ac81-00505690980f",
+ "description": "File Directory Security Apply Job",
+ "state": "success",
+ "message": "Complete: Operation completed successfully. File ACLs modified using policy \"policy1\" on Vserver \"GBSMNAS80LD\". File count: 0. [0]",
+ "code": 0,
+ "start_time": "2021-06-29T05:25:26-04:00",
+ "end_time": "2021-06-29T05:25:26-04:00"
+ }, None
+ )
+}
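+# test_rest_success below expects the asynchronous job pattern: 'job_id_record'
+# is returned first for the submitted job, then 'job_response_record' reports
+# its completion.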
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' test missing arguments '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_success(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Create job to apply policy to directory '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['job_id_record'],
+ SRR['job_response_record'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_directory_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_directory_policy.py
new file mode 100644
index 000000000..94af48ed8
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_directory_policy.py
@@ -0,0 +1,136 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_file_directory_policy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_file_directory_policy \
+ import NetAppOntapFilePolicy as policy_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ request = xml.to_string().decode('utf-8')
+ if self.kind == 'error':
+ raise netapp_utils.zapi.NaApiError('test', 'expect error')
+ elif request.startswith("<ems-autosupport-log>"):
+ xml = None # or something that may make the logger happy, and you don't need @patch anymore
+ # or
+ # xml = build_ems_log_response()
+ elif request.startswith("<file-directory-security-policy-get-iter>"):
+ if self.kind == 'create':
+ xml = self.build_sd_info()
+ else:
+ xml = self.build_sd_info(self.params)
+ elif request.startswith("<file-directory-security-ntfs-modify>"):
+ xml = self.build_sd_info(self.params)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_sd_info(data=None):
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {}
+ if data is not None:
+ attributes = {'num-records': 1,
+ 'attributes-list': {'file-directory-security-policy': {'policy-name': data['policy_name']}}}
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_file_directory_policy '''
+
+ def mock_args(self):
+ return {
+ 'vserver': 'vserver',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_policy_mock_object(self, type='zapi', kind=None, status=None):
+ policy_obj = policy_module()
+ if type == 'zapi':
+ if kind is None:
+ policy_obj.server = MockONTAPConnection()
+ else:
+ policy_obj.server = MockONTAPConnection(kind=kind, data=status)
+ return policy_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ policy_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_successfully_create_policy(self):
+ data = self.mock_args()
+ data['policy_name'] = 'test_policy'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_policy_mock_object('zapi', 'create', data).apply()
+ assert exc.value.args[0]['changed']
+
+ def test_error(self):
+ data = self.mock_args()
+ data['policy_name'] = 'test_policy'
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_mock_object('zapi', 'error', data).get_policy_iter()
+ assert exc.value.args[0]['msg'] == 'Error fetching file-directory policy test_policy: NetApp API failed. Reason - test:expect error'
+
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_mock_object('zapi', 'error', data).create_policy()
+ assert exc.value.args[0]['msg'] == 'Error creating file-directory policy test_policy: NetApp API failed. Reason - test:expect error'
+
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_mock_object('zapi', 'error', data).remove_policy()
+ assert exc.value.args[0]['msg'] == 'Error removing file-directory policy test_policy: NetApp API failed. Reason - test:expect error'
+
+ data['path'] = '/vol'
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_mock_object('zapi', 'error', data).get_task_iter()
+ assert exc.value.args[0]['msg'] == 'Error fetching task from file-directory policy test_policy: NetApp API failed. Reason - test:expect error'
+
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_mock_object('zapi', 'error', data).add_task_to_policy()
+ assert exc.value.args[0]['msg'] == 'Error adding task to file-directory policy test_policy: NetApp API failed. Reason - test:expect error'
+
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_mock_object('zapi', 'error', data).remove_task_from_policy()
+ assert exc.value.args[0]['msg'] == 'Error removing task from file-directory policy test_policy: NetApp API failed. Reason - test:expect error'
+
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_mock_object('zapi', 'error', data).modify_task(dict())
+ assert exc.value.args[0]['msg'] == 'Error modifying task in file-directory policy test_policy: NetApp API failed. Reason - test:expect error'
+
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_mock_object('zapi', 'error', data).set_sd()
+ assert exc.value.args[0]['msg'] == 'Error applying file-directory policy test_policy: NetApp API failed. Reason - test:expect error'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_security_permissions.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_security_permissions.py
new file mode 100644
index 000000000..b25dca7ab
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_security_permissions.py
@@ -0,0 +1,647 @@
+# (c) 2022-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_warning_was_raised, print_warnings, \
+ patch_ansible, call_main, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_file_security_permissions \
+ import NetAppOntapFileSecurityPermissions as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def build_acl(user, access='access_allow', access_control='file_directory', apply_to=None, inherited=None, advanced_rights='all', rights=None):
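+ # helper for the canned ACL responses below; advanced_rights='all' expands to
+ # every advanced right set to True, and apply_to defaults to 'this_folder'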
+ if apply_to is None:
+ apply_to = {'this_folder': True}
+ if advanced_rights == 'all':
+ advanced_rights = {
+ 'append_data': True,
+ 'delete': True,
+ 'delete_child': True,
+ 'execute_file': True,
+ 'full_control': True,
+ 'read_attr': True,
+ 'read_data': True,
+ 'read_ea': True,
+ 'read_perm': True,
+ 'synchronize': True,
+ 'write_attr': True,
+ 'write_data': True,
+ 'write_ea': True,
+ 'write_owner': True,
+ 'write_perm': True
+ }
+
+ acl = {
+ 'access': access,
+ 'access_control': access_control,
+ 'advanced_rights': advanced_rights,
+ 'apply_to': apply_to,
+ 'user': user
+ }
+ if inherited is not None:
+ acl['inherited'] = inherited
+ if rights is not None:
+ acl['rights'] = rights
+ return acl
+
+
+SRR = rest_responses({
+ 'non_acl': (200, {
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_only_inherited_acl': (200, {
+ 'acls': [
+ build_acl('Everyone', inherited=True)
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_multiple_user': (200, {
+ 'acls': [
+ build_acl('NETAPPAD\\mohan9'),
+ build_acl('SERVER_CIFS_TE\\mohan11'),
+ build_acl('Everyone', inherited=True)
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_single_user_deny': (200, {
+ 'acls': [
+ build_acl('NETAPPAD\\mohan9', access='access_deny')
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_single_user_deny_empty_advrights': (200, {
+ 'acls': [
+ build_acl('NETAPPAD\\mohan9', access='access_deny', advanced_rights={})
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_single_user_deny_empty_advrights_mohan11': (200, {
+ 'acls': [
+ build_acl('NETAPPAD\\mohan9', access='access_deny', advanced_rights={})
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'SERVER_CIFS_TE\\mohan11',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_single_user_rights': (200, {
+ 'acls': [
+ build_acl('NETAPPAD\\mohan9', access='access_deny', advanced_rights={}, rights='full_control')
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'slag_acl_same_user': (200, {
+ 'acls': [
+ build_acl('SERVER_CIFS_TE\\mohan11', access_control='slag', apply_to={'files': True}, advanced_rights={"append_data": True}, access='access_deny'),
+ build_acl('SERVER_CIFS_TE\\mohan11', access_control='slag', apply_to={'files': True}, advanced_rights={"append_data": True})
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'svm_id': (200, {
+ 'uuid': '55bcb009'
+ }, None),
+ 'error_655865': (400, None, {'code': 655865, 'message': 'Expected error'}),
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'path': '/vol200/aNewFile.txt',
+ 'acls': [
+ {
+ "access": "access_allow",
+ "user": "SERVER_CIFS_TE\\mohan11",
+ "advanced_rights": {"append_data": True},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False}
+ },
+ {
+ "access": "access_allow",
+ "user": "NETAPPAD\\mohan9",
+ "advanced_rights": {"append_data": True},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False}
+ },
+
+ ]
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "vserver", "path"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_create_file_directory_acl():
+ ''' create file_directory acl and idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['zero_records']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['non_acl']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl', SRR['success']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['zero_records']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['non_acl']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['non_acl']),
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS)['changed']
+ # Add ACLs to an SD only record
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+ # create SD only
+ args = dict(DEFAULT_ARGS)
+ args.pop('acls')
+ assert create_and_apply(my_module, args)['changed']
+ assert not create_and_apply(my_module, args)['changed']
+
+
+def test_add_file_directory_acl():
+ ''' add file_directory acl and idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user']),
+ ('DELETE', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/NETAPPAD%5Cmohan9', SRR['success']),
+ ('DELETE', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/SERVER_CIFS_TE%5Cmohan11', SRR['success']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny'])
+ ])
+ args = {
+ 'acls': [{
+ "access": "access_deny",
+ "user": "NETAPPAD\\mohan9",
+ "advanced_rights": {"append_data": True},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ }]
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_file_directory_acl():
+    ''' delete file_directory acl and idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user']),
+ ('DELETE', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/NETAPPAD%5Cmohan9', SRR['success']),
+ ('DELETE', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/SERVER_CIFS_TE%5Cmohan11', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_only_inherited_acl']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_only_inherited_acl']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['error_655865']),
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['generic_error']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['generic_error']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl', SRR['generic_error']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['generic_error']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/user1', SRR['generic_error']),
+ ('DELETE', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/user1', SRR['generic_error'])
+ ])
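+    # methods are called directly, with svm_uuid preset, so each call consumes one 'generic_error' response registered above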
+
+ acl_obj = create_module(my_module, DEFAULT_ARGS)
+ acl_obj.svm_uuid = "55bcb009"
+ assert 'Error fetching file security' in expect_and_capture_ansible_exception(acl_obj.get_file_security_permissions, 'fail')['msg']
+ assert 'Error creating file security' in expect_and_capture_ansible_exception(acl_obj.create_file_security_permissions, 'fail')['msg']
+ assert 'Error adding file security' in expect_and_capture_ansible_exception(acl_obj.add_file_security_permissions_acl, 'fail', {})['msg']
+ assert 'Error modifying file security' in expect_and_capture_ansible_exception(acl_obj.modify_file_security_permissions, 'fail', {})['msg']
+ acl = {'user': 'user1'}
+ assert 'Error modifying file security' in expect_and_capture_ansible_exception(acl_obj.modify_file_security_permissions_acl, 'fail', acl)['msg']
+ assert 'Error deleting file security permissions' in expect_and_capture_ansible_exception(acl_obj.delete_file_security_permissions_acl, 'fail', acl)['msg']
+ # no network calls
+ assert 'Error: mismatch on path values: desired:' in expect_and_capture_ansible_exception(
+ acl_obj.get_modify_actions, 'fail', {'path': 'dummy'})['msg']
+
+
+def test_create_file_directory_slag():
+ ''' create slag acl and idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['zero_records']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['slag_acl_same_user']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['slag_acl_same_user'])
+ ])
+ args = {
+ 'access_control': 'slag',
+ 'acls': [
+ {
+ 'access': 'access_deny',
+ 'access_control': 'slag',
+ 'advanced_rights': {'append_data': True},
+ 'apply_to': {'files': True, "this_folder": False, "sub_folders": False},
+ 'user': 'SERVER_CIFS_TE\\mohan11'
+ },
+ {
+ 'access': 'access_allow',
+ 'access_control': 'slag',
+ 'advanced_rights': {'append_data': True},
+ 'apply_to': {'files': True, "this_folder": False, "sub_folders": False},
+ 'user': 'SERVER_CIFS_TE\\mohan11'
+ }
+ ]
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_modify_file_directory_owner():
+ ''' modify file owner '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny_empty_advrights']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny_empty_advrights_mohan11']),
+ ])
+ args = {
+ 'acls': [{
+ "access": "access_deny",
+ "user": "NETAPPAD\\mohan9",
+ "advanced_rights": {"append_data": False},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ }],
+ 'owner': 'SERVER_CIFS_TE\\mohan11'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ # idempotency already tested in create and add
+
+
+def test_modify_file_directory_acl_advrights():
+    ''' modify file_directory acl advanced_rights '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/NETAPPAD%5Cmohan9', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny_empty_advrights']),
+ ])
+ args = {
+ 'acls': [{
+ "access": "access_deny",
+ "user": "NETAPPAD\\mohan9",
+ "advanced_rights": {"append_data": False},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ }]
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ # idempotency already tested in create and add
+
+
+def test_modify_file_directory_acl_rights():
+    ''' modify file_directory acl using rights
+ it always fails the validation check, as REST does not return rights
+ it is not idempotent for the same reason
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/NETAPPAD%5Cmohan9', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny_empty_advrights']),
+ # 2nd run
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/NETAPPAD%5Cmohan9', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny_empty_advrights']),
+ ])
+ args = {
+ 'acls': [{
+ "access": "access_deny",
+ "user": "NETAPPAD\\mohan9",
+ "rights": 'modify',
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ }],
+ 'validate_changes': 'error'
+ }
+ error = "Error - patch-acls still required for [{"
+ assert error in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+ args['validate_changes'] = 'warn'
+ assert call_main(my_main, DEFAULT_ARGS, args)['changed']
+ print_warnings()
+ assert_warning_was_raised('Error - patch-acls still required for [', partial_match=True)
+
+
+def test_negative_acl_rights_and_advrights():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ args = {
+ 'access_control': 'file_directory',
+ 'acls': [{
+ "access": "access_deny",
+ "user": "NETAPPAD\\mohan9",
+ "advanced_rights": {"append_data": False},
+ "rights": 'modify',
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ }],
+ 'validate_changes': 'error'
+
+ }
+ error = "Error: suboptions 'rights' and 'advanced_rights' are mutually exclusive."
+ assert error in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+ del args['acls'][0]['rights']
+ args['acls'][0]['access_control'] = "slag"
+ error = "Error: mismatch between top level value and ACL value for"
+ assert error in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+ args['acls'][0]['apply_to'] = {"this_folder": False, "files": False, "sub_folders": False}
+ error = "Error: at least one suboption must be true for apply_to. Got: "
+ assert error in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_get_acl_actions_on_create():
+ """ given a set of ACLs in self.parameters, split them in four groups, or fewer """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+
+ apply_to = {'this_folder': True, 'files': False, 'sub_folders': False}
+
+ fd_prop_acls = [
+ # All these ACLs fall into a single category, as file_directory and propagate are the defaults
+ {"access": "access_deny", "user": "user01", "apply_to": apply_to},
+ {"access": "access_deny", "user": "user02", "apply_to": apply_to, 'access_control': 'file_directory'},
+ {"access": "access_deny", "user": "user03", "apply_to": apply_to, 'access_control': 'file_directory', 'propagation_mode': 'propagate'},
+ {"access": "access_deny", "user": "user04", "apply_to": apply_to, 'propagation_mode': 'propagate'}
+ ]
+
+ fd_replace_acls = [
+ {"access": "access_deny", "user": "user11", "apply_to": apply_to, 'access_control': 'file_directory', 'propagation_mode': 'replace'},
+ {"access": "access_deny", "user": "user12", "apply_to": apply_to, 'propagation_mode': 'replace'}
+ ]
+
+ slag_prop_acls = [
+ {"access": "access_deny", "user": "user21", "apply_to": apply_to, 'access_control': 'slag'},
+ {"access": "access_deny", "user": "user22", "apply_to": apply_to, 'access_control': 'slag', 'propagation_mode': 'propagate'},
+ ]
+
+ slag_replace_acls = [
+ {"access": "access_deny", "user": "user31", "apply_to": apply_to, 'access_control': 'slag', 'propagation_mode': 'replace'},
+ ]
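+    # the four lists above cover the four categories: access_control (file_directory or slag) crossed with propagation_mode (propagate or replace)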
+
+ args = {
+ 'acls': fd_prop_acls,
+ 'validate_changes': 'error'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ acls = my_obj.get_acl_actions_on_create()
+ assert not any(acls[x] for x in acls)
+ assert my_obj.parameters['acls'] == fd_prop_acls
+
+ args = {
+ 'acls': fd_prop_acls + fd_replace_acls + slag_prop_acls + slag_replace_acls,
+ 'validate_changes': 'error'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ acls = my_obj.get_acl_actions_on_create()
+ print('P_ACLS', acls)
+ print('C_ACLS', my_obj.parameters['acls'])
+ assert len(acls['post-acls']) == 5
+ assert my_obj.parameters['acls'] == fd_prop_acls
+
+ args = {
+ 'acls': slag_replace_acls,
+ 'validate_changes': 'error'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ acls = my_obj.get_acl_actions_on_create()
+ assert not any(acls[x] for x in acls)
+ assert my_obj.parameters['acls'] == slag_replace_acls
+
+
+def test_get_acl_actions_on_create_special():
+ """ given a set of ACLs in self.parameters, split them in four groups, or fewer """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+
+ apply_to = {'this_folder': True, 'files': False, 'sub_folders': False}
+
+ fd_prop_acls = [
+ # All these ACLs fall into a single category, as file_directory and propagate are the defaults
+ {"access": "access_deny", "user": "user01", "apply_to": apply_to},
+ {"access": "access_deny", "user": "user02", "apply_to": apply_to, 'access_control': 'file_directory'},
+ {"access": "access_deny", "user": "user03", "apply_to": apply_to, 'access_control': 'file_directory', 'propagation_mode': 'propagate'},
+ {"access": "access_deny", "user": "user04", "apply_to": apply_to, 'propagation_mode': 'propagate'}
+ ]
+
+ fd_replace_acls = [
+ {"access": "access_deny", "user": "user11", "apply_to": apply_to, 'access_control': 'file_directory', 'propagation_mode': 'replace'},
+ {"access": "access_deny", "user": "user12", "apply_to": apply_to, 'propagation_mode': 'replace'}
+ ]
+
+ slag_prop_acls = [
+ {"access": "access_allowed_callback", "user": "user21", "apply_to": apply_to, 'access_control': 'slag'},
+ {"access": "access_denied_callback", "user": "user22", "apply_to": apply_to, 'access_control': 'slag', 'propagation_mode': 'propagate'},
+ ]
+
+ slag_replace_acls = [
+ {"access": "access_deny", "user": "user31", "apply_to": apply_to, 'access_control': 'slag', 'propagation_mode': 'replace'},
+ ]
+
+ fd_replace_acls_conflict = [
+ {"access": "access_denied_callback", "user": "user11", "apply_to": apply_to, 'access_control': 'file_directory', 'propagation_mode': 'replace'},
+ {"access": "access_allowed_callback", "user": "user12", "apply_to": apply_to, 'propagation_mode': 'replace'}
+ ]
+
+ args = {
+ 'acls': fd_prop_acls + fd_replace_acls + slag_prop_acls + slag_replace_acls,
+ 'validate_changes': 'error'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ acls = my_obj.get_acl_actions_on_create()
+ print('P_ACLS', acls)
+ print('C_ACLS', my_obj.parameters['acls'])
+ assert len(acls['post-acls']) == 7
+ assert my_obj.parameters['acls'] == slag_prop_acls
+
+ args = {
+ 'acls': fd_prop_acls + fd_replace_acls_conflict + slag_prop_acls + slag_replace_acls,
+ 'validate_changes': 'error'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ error = 'with access access_allowed_callback conflicts with other ACLs using accesses'
+ assert error in expect_and_capture_ansible_exception(my_obj.get_acl_actions_on_create, 'fail')['msg']
+
+
+def test_negative_unsupported_version():
+    ''' report errors when the ONTAP version does not support the module or the access_control options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ # ('GET', 'svm/svms', SRR['svm_id']),
+ # ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['non_acl']),
+ # ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['success']),
+ # ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['slag_acl_same_user']),
+ # ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ # ('GET', 'svm/svms', SRR['svm_id']),
+ # ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['slag_acl_same_user'])
+ ])
+ args = {
+ 'access_control': 'slag',
+ 'acls': [
+ {
+ 'access': 'access_deny',
+ 'access_control': 'slag',
+ 'advanced_rights': {'append_data': True},
+ 'apply_to': {'files': True, "this_folder": False, "sub_folders": False},
+ 'user': 'SERVER_CIFS_TE\\mohan11'
+ },
+ {
+ 'access': 'access_allow',
+ 'access_control': 'slag',
+ 'advanced_rights': {'append_data': True},
+ 'apply_to': {'files': True, "this_folder": False, "sub_folders": False},
+ 'user': 'SERVER_CIFS_TE\\mohan11'
+ }
+ ]
+ }
+ error = 'Error: na_ontap_file_security_permissions only supports REST, and requires ONTAP 9.9.1 or later. Found: 9.8.0.'
+ assert error in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+ error = 'Minimum version of ONTAP for access_control is (9, 10, 1)'
+ msg = call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+ assert error in msg
+ error = 'Minimum version of ONTAP for acls.access_control is (9, 10, 1)'
+ assert error in msg
+
+
+def test_match_acl_with_acls():
+    """ match a desired ACL against a list of ACLs: return the match, None, or fail on duplicates """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+
+ apply_to = {'this_folder': True, 'files': False, 'sub_folders': False}
+
+ fd_prop_acls = [
+ # All these ACLs fall into a single category, as file_directory and propagate are the defaults
+ {"access": "access_deny", "user": "user01", "apply_to": apply_to},
+ {"access": "access_deny", "user": "user02", "apply_to": apply_to, 'access_control': 'file_directory'},
+ {"access": "access_deny", "user": "user03", "apply_to": apply_to, 'access_control': 'file_directory', 'propagation_mode': 'propagate'},
+ {"access": "access_deny", "user": "user04", "apply_to": apply_to, 'propagation_mode': 'propagate'}
+ ]
+
+ fd_replace_acls = [
+ {"access": "access_deny", "user": "user11", "apply_to": apply_to, 'access_control': 'file_directory', 'propagation_mode': 'replace'},
+ {"access": "access_deny", "user": "user12", "apply_to": apply_to, 'propagation_mode': 'replace'}
+ ]
+
+ acl = fd_prop_acls[3]
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert acl == my_obj.match_acl_with_acls(acl, fd_prop_acls)
+ assert my_obj.match_acl_with_acls(acl, fd_replace_acls) is None
+ error = 'Error: found more than one desired ACLs with same user, access, access_control and apply_to'
+ assert error in expect_and_capture_ansible_exception(my_obj.match_acl_with_acls, 'fail', acl, fd_prop_acls + fd_prop_acls)['msg']
+
+
+def test_validate_changes():
+    """ verify validate_changes honors the ignore, error and warn settings """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/file-security/permissions/None/%2Fvol200%2FaNewFile.txt', SRR['zero_records']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/file-security/permissions/None/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny']),
+ ])
+ args = {
+ 'validate_changes': 'ignore'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ assert my_obj.validate_changes('create', {}) is None
+ args = {
+ 'validate_changes': 'error'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ error = 'Error - create still required after create'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_changes, 'fail', 'create', {})['msg']
+ args = {
+ 'validate_changes': 'warn',
+ 'owner': 'new_owner'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ warning = "Error - modify: {'owner': 'new_owner'} still required after {'a': 'b'}"
+ assert my_obj.validate_changes('create', {'a': 'b'}) is None
+ assert_warning_was_raised(warning, partial_match=True)
+ assert_warning_was_raised('post-acls still required for', partial_match=True)
+ assert_warning_was_raised('delete-acls still required for', partial_match=True)
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_security_permissions_acl.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_security_permissions_acl.py
new file mode 100644
index 000000000..510f04a9e
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_file_security_permissions_acl.py
@@ -0,0 +1,331 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ patch_ansible, assert_warning_was_raised, call_main, print_warnings, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ get_mock_record, patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_file_security_permissions_acl\
+ import NetAppOntapFileSecurityPermissionsACL as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
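+# build a REST-style ACL record; advanced_rights='all' expands to a dict with every advanced right set to True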
+def build_acl(user, access='access_allow', access_control='file_directory', apply_to=None, inherited=None, advanced_rights='all', rights=None):
+ if apply_to is None:
+ apply_to = {'this_folder': True}
+ if advanced_rights == 'all':
+ advanced_rights = {
+ 'append_data': True,
+ 'delete': True,
+ 'delete_child': True,
+ 'execute_file': True,
+ 'full_control': True,
+ 'read_attr': True,
+ 'read_data': True,
+ 'read_ea': True,
+ 'read_perm': True,
+ 'synchronize': True,
+ 'write_attr': True,
+ 'write_data': True,
+ 'write_ea': True,
+ 'write_owner': True,
+ 'write_perm': True
+ }
+
+ acl = {
+ 'access': access,
+ 'access_control': access_control,
+ 'advanced_rights': advanced_rights,
+ 'apply_to': apply_to,
+ 'user': user
+ }
+ if inherited is not None:
+ acl['inherited'] = inherited
+ if rights is not None:
+ acl['rights'] = rights
+ return acl
+
+
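+# canned REST responses: each entry is a (status_code, json_body, error) tuple, referenced by name in register_responses()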
+SRR = rest_responses({
+ 'non_acl': (200, {
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_only_inherited_acl': (200, {
+ 'acls': [
+ build_acl('Everyone', inherited=True)
+ ],
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_multiple_user': (200, {
+ 'acls': [
+ build_acl('NETAPPAD\\mohan9'),
+ build_acl('SERVER_CIFS_TE\\mohan11'),
+ build_acl('Everyone', inherited=True)
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_multiple_user_adv_rights': (200, {
+ 'acls': [
+ build_acl('NETAPPAD\\mohan9'),
+ build_acl('SERVER_CIFS_TE\\mohan11', advanced_rights={"append_data": True}),
+ build_acl('Everyone', inherited=True)
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'fd_acl_single_user_deny': (200, {
+ 'acls': [
+ build_acl('SERVER_CIFS_TE\\mohan11', access='access_deny')
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'slag_acl_same_user': (200, {
+ 'acls': [
+ build_acl('SERVER_CIFS_TE\\mohan11', access_control='slag', apply_to={'files': True}, advanced_rights={"append_data": True}, access='access_deny'),
+ build_acl('SERVER_CIFS_TE\\mohan11', access_control='slag', apply_to={'files': True}, advanced_rights={"append_data": True})
+ ],
+ 'control_flags': '0x8014',
+ 'group': 'BUILTIN\\Administrators',
+ 'owner': 'BUILTIN\\Administrators',
+ 'path': '/vol200/aNewFile.txt',
+ 'svm': {'name': 'ansible_ipspace_datasvm', 'uuid': '55bcb009'}
+ }, None),
+ 'svm_id': (200, {
+ 'uuid': '55bcb009'
+ }, None),
+ 'error_655865': (400, None, {'code': 655865, 'message': 'Expected error'}),
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'path': '/vol200/aNewFile.txt',
+ 'access_control': 'file_directory',
+ "access": "access_allow",
+ "acl_user": "SERVER_CIFS_TE\\mohan11",
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "vserver", "path"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_create_file_directory_acl():
+ ''' create file_directory acl and idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['non_acl']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user'])
+ ])
+ module_args = {
+ "advanced_rights": {"append_data": True},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_file_directory_acl():
+ ''' modify file_directory acl and idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/SERVER_CIFS_TE%5Cmohan11', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user_adv_rights']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user_adv_rights']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user_adv_rights']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/SERVER_CIFS_TE%5Cmohan11', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user_adv_rights']),
+ ])
+ module_args = {
+ 'advanced_rights': {'append_data': True, 'delete': False},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ 'rights': 'full_control',
+ }
+ error = "Error - modify: {'rights': 'full_control'} still required after {'rights': 'full_control'}"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_delete_file_directory_acl():
+    ''' delete file_directory acl and idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_multiple_user']),
+ ('DELETE', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/SERVER_CIFS_TE%5Cmohan11', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['non_acl']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_only_inherited_acl']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['error_655865'])
+ ])
+ module_args = {
+ "advanced_rights": {"append_data": True},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ "state": "absent"
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_acl_rights_and_advrights():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ args = {
+ 'access_control': 'file_directory',
+ "access": "access_deny",
+ "acl_user": "NETAPPAD\\mohan9",
+ "advanced_rights": {"append_data": False},
+ "rights": 'modify',
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False},
+ 'validate_changes': 'error'
+
+ }
+ error = "Error: suboptions 'rights' and 'advanced_rights' are mutually exclusive."
+ assert error in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+
+ del args['rights']
+ args['apply_to'] = {"this_folder": False, "files": False, "sub_folders": False}
+ error = "Error: at least one suboption must be true for apply_to. Got: "
+ assert error in call_main(my_main, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['generic_error']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl', SRR['generic_error']),
+ ('PATCH', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/SERVER_CIFS_TE%5Cmohan11', SRR['generic_error']),
+ ('DELETE', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl/SERVER_CIFS_TE%5Cmohan11', SRR['generic_error'])
+ ])
+ module_args = {
+ "advanced_rights": {"append_data": True},
+ "apply_to": {"this_folder": True, "files": False, "sub_folders": False}
+ }
+
+ acl_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ acl_obj.svm_uuid = "55bcb009"
+ assert 'Error fetching file security' in expect_and_capture_ansible_exception(acl_obj.get_file_security_permissions_acl, 'fail')['msg']
+ assert 'Error creating file security' in expect_and_capture_ansible_exception(acl_obj.create_file_security_permissions_acl, 'fail')['msg']
+ assert 'Error modifying file security' in expect_and_capture_ansible_exception(acl_obj.modify_file_security_permissions_acl, 'fail')['msg']
+ assert 'Error deleting file security permissions' in expect_and_capture_ansible_exception(acl_obj.delete_file_security_permissions_acl, 'fail')['msg']
+ assert 'Internal error - unexpected action bad_action' in expect_and_capture_ansible_exception(acl_obj.build_body, 'fail', 'bad_action')['msg']
+ acl = build_acl('user')
+ acls = [acl, acl]
+ assert 'Error matching ACLs, found more than one match. Found' in expect_and_capture_ansible_exception(acl_obj.match_acl_with_acls, 'fail',
+ acl, acls)['msg']
+
+
+def test_create_file_directory_slag():
+ ''' create slag acl and idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['non_acl']),
+ ('POST', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt/acl', SRR['success']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['slag_acl_same_user']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_id']),
+ ('GET', 'protocols/file-security/permissions/55bcb009/%2Fvol200%2FaNewFile.txt', SRR['slag_acl_same_user'])
+ ])
+ module_args = {
+ 'access_control': 'slag',
+ 'access': 'access_deny',
+ 'advanced_rights': {'append_data': True},
+ 'apply_to': {'files': True},
+ 'acl_user': 'SERVER_CIFS_TE\\mohan11'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_validate_changes():
+    """ verify validate_changes honors the ignore, error and warn settings """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/file-security/permissions/None/%2Fvol200%2FaNewFile.txt', SRR['zero_records']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/file-security/permissions/None/%2Fvol200%2FaNewFile.txt', SRR['fd_acl_single_user_deny']),
+ ])
+ args = {
+ "advanced_rights": {"append_data": True},
+ 'apply_to': {'files': True},
+ 'validate_changes': 'ignore',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ assert my_obj.validate_changes('create', {}) is None
+ args = {
+ "advanced_rights": {"append_data": True},
+ 'apply_to': {'files': True},
+ 'validate_changes': 'error',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ error = 'Error - create still required after create'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_changes, 'fail', 'create', {})['msg']
+ args = {
+ 'access': 'access_deny',
+ 'advanced_rights': {
+ 'append_data': False,
+ },
+ 'apply_to': {'this_folder': True},
+ 'validate_changes': 'warn',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, args)
+ warning = "Error - modify: {'advanced_rights': {'append_data': False}} still required after {'a': 'b'}"
+ assert my_obj.validate_changes('create', {'a': 'b'}) is None
+ print_warnings()
+ assert_warning_was_raised(warning, partial_match=True)
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_firewall_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_firewall_policy.py
new file mode 100644
index 000000000..b23a897a3
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_firewall_policy.py
@@ -0,0 +1,263 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_firewall_policy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_firewall_policy \
+ import NetAppONTAPFirewallPolicy as fp_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.data = data
+ self.xml_in = None
+ self.xml_out = None
+
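+    # return canned ZAPI XML for the registered kind ('policy' or 'config') and record the request in xml_in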
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'policy':
+ xml = self.build_policy_info(self.data)
+ if self.kind == 'config':
+ xml = self.build_firewall_config_info(self.data)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_policy_info(data):
+ ''' build xml data for net-firewall-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'net-firewall-policy-info': {
+ 'policy': data['policy'],
+ 'service': data['service'],
+ 'allow-list': [
+ {'ip-and-mask': '1.2.3.0/24'}
+ ]
+ }
+ }
+ }
+
+ xml.translate_struct(attributes)
+ return xml
+
+ @staticmethod
+ def build_firewall_config_info(data):
+ ''' build xml data for net-firewall-config-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'attributes': {
+ 'net-firewall-config-info': {
+ 'is-enabled': 'true',
+ 'is-logging': 'false'
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.mock_policy = {
+ 'policy': 'test',
+ 'service': 'none',
+ 'vserver': 'my_vserver',
+ 'allow_list': '1.2.3.0/24'
+ }
+ self.mock_config = {
+ 'node': 'test',
+ 'enable': 'enable',
+ 'logging': 'enable'
+ }
+
+ def mock_policy_args(self):
+ return {
+ 'policy': self.mock_policy['policy'],
+ 'service': self.mock_policy['service'],
+ 'vserver': self.mock_policy['vserver'],
+ 'allow_list': [self.mock_policy['allow_list']],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def mock_config_args(self):
+ return {
+ 'node': self.mock_config['node'],
+ 'enable': self.mock_config['enable'],
+ 'logging': self.mock_config['logging'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_mock_object(self, kind=None):
+ """
+ Helper method to return an na_ontap_firewall_policy object
+ :param kind: passes this param to MockONTAPConnection()
+ :return: na_ontap_firewall_policy object
+ """
+ obj = fp_module()
+ obj.autosupport_log = Mock(return_value=None)
+ if kind is None:
+ obj.server = MockONTAPConnection()
+ else:
+ mock_data = self.mock_config if kind == 'config' else self.mock_policy
+ obj.server = MockONTAPConnection(kind=kind, data=mock_data)
+ return obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ fp_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_helper_firewall_policy_attributes(self):
+ ''' helper returns dictionary with vserver, service and policy details '''
+ data = self.mock_policy
+ set_module_args(self.mock_policy_args())
+ result = self.get_mock_object('policy').firewall_policy_attributes()
+ del data['allow_list']
+ assert data == result
+
+        ''' test that validate_ip_addresses accepts proper network addresses '''
+ ''' test if helper validates if IP is a network address '''
+ data = self.mock_policy_args()
+ data['allow_list'] = ['1.2.0.0/16', '1.2.3.0/24']
+ set_module_args(data)
+ result = self.get_mock_object().validate_ip_addresses()
+ assert result is None
+
+        ''' test that validate_ip_addresses rejects addresses with host bits set '''
+ ''' test if helper validates if IP is a network address '''
+ data = self.mock_policy_args()
+ data['allow_list'] = ['1.2.0.10/16', '1.2.3.0/24']
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_mock_object().validate_ip_addresses()
+ msg = 'Error: Invalid IP network value 1.2.0.10/16.' \
+ ' Please specify a network address without host bits set: ' \
+ '1.2.0.10/16 has host bits set.'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_get_nonexistent_policy(self):
+ ''' Test if get_firewall_policy returns None for non-existent policy '''
+ set_module_args(self.mock_policy_args())
+ result = self.get_mock_object().get_firewall_policy()
+ assert result is None
+
+ def test_get_existing_policy(self):
+ ''' Test if get_firewall_policy returns policy details for existing policy '''
+ data = self.mock_policy_args()
+ set_module_args(data)
+ result = self.get_mock_object('policy').get_firewall_policy()
+ assert result['service'] == data['service']
+ assert result['allow_list'] == ['1.2.3.0/24'] # from build_policy_info()
+
+ def test_successful_create(self):
+ ''' Test successful create '''
+ set_module_args(self.mock_policy_args())
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ def test_create_idempotency(self):
+ ''' Test create idempotency '''
+ set_module_args(self.mock_policy_args())
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object('policy').apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successful_delete(self):
+        ''' Test delete existing policy '''
+ data = self.mock_policy_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object('policy').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_delete_idempotency(self):
+ ''' Test delete idempotency '''
+ data = self.mock_policy_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successful_modify(self):
+ ''' Test successful modify allow_list '''
+ data = self.mock_policy_args()
+ data['allow_list'] = ['1.2.0.0/16']
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object('policy').apply()
+ assert exc.value.args[0]['changed']
+
+    def test_successful_modify_multiple_ips(self):
+ ''' Test successful modify allow_list '''
+ data = self.mock_policy_args()
+ data['allow_list'] = ['1.2.0.0/16', '1.0.0.0/8']
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object('policy').apply()
+ assert exc.value.args[0]['changed']
+
+    def test_successful_modify_multiple_ips_contain_existing(self):
+ ''' Test successful modify allow_list '''
+ data = self.mock_policy_args()
+ data['allow_list'] = ['1.2.3.0/24', '1.0.0.0/8']
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object('policy').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_get_nonexistent_config(self):
+ ''' Test if get_firewall_config returns None for non-existent node '''
+ set_module_args(self.mock_config_args())
+ result = self.get_mock_object().get_firewall_config_for_node()
+ assert result is None
+
+ def test_get_existing_config(self):
+ ''' Test if get_firewall_config returns policy details for existing node '''
+ data = self.mock_config_args()
+ set_module_args(data)
+ result = self.get_mock_object('config').get_firewall_config_for_node()
+        assert result['enable'] == 'enable'  # from build_firewall_config_info()
+        assert result['logging'] == 'disable'  # from build_firewall_config_info()
+
+ def test_successful_modify_config(self):
+        ''' Test successful modify of firewall config '''
+ data = self.mock_config_args()
+ data['enable'] = 'disable'
+ data['logging'] = 'enable'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_mock_object('config').apply()
+ assert exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_firmware_upgrade.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_firmware_upgrade.py
new file mode 100644
index 000000000..140b91cd7
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_firmware_upgrade.py
@@ -0,0 +1,891 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_firmware_upgrade '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, call
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade\
+ import NetAppONTAPFirmwareUpgrade as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def mock_warn(me, log):
+ print('WARNING', log)
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, parm1=None, parm2=None, parm3=None):
+ ''' save arguments '''
+ self.type = kind
+ self.parm1 = parm1
+ self.parm2 = parm2
+ # self.parm3 = parm3
+ self.xml_in = None
+ self.xml_out = None
+ self.firmware_type = 'None'
+
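+    # return canned ZAPI XML based on self.type, or raise NaApiError when type is 'exception'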
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ print('xml_in', xml.to_string())
+ print('kind', self.type)
+ if self.type == 'firmware_upgrade':
+ xml = self.build_firmware_upgrade_info(self.parm1, self.parm2)
+ if self.type == 'acp':
+ xml = self.build_acp_firmware_info(self.firmware_type)
+ if self.type == 'disk_fw_info':
+ xml = self.build_disk_firmware_info(self.firmware_type)
+ if self.type == 'shelf_fw_info':
+ xml = self.build_shelf_firmware_info(self.firmware_type)
+ if self.type == 'firmware_download':
+ xml = self.build_system_cli_info(error=self.parm1)
+ if self.type == 'exception':
+ raise netapp_utils.zapi.NaApiError(self.parm1, self.parm2)
+ self.xml_out = xml
+ print('xml_out', xml.to_string())
+ return xml
+
+ @staticmethod
+ def build_firmware_upgrade_info(version, node):
+ ''' build xml data for service-processor firmware info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'num-records': 1,
+ 'attributes-list': {'service-processor-info': {'firmware-version': '3.4'}}
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_acp_firmware_info(firmware_type):
+ ''' build xml data for acp firmware info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ # 'num-records': 1,
+ 'attributes-list': {'storage-shelf-acp-module': {'state': 'firmware_update_required'}}
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_disk_firmware_info(firmware_type):
+ ''' build xml data for disk firmware info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'num-records': 1,
+ 'attributes-list': [{'storage-disk-info': {'disk-uid': '1', 'disk-inventory-info': {'firmware-revision': '1.2.3'}}}]
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_shelf_firmware_info(firmware_type):
+ ''' build xml data for shelf firmware info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'num-records': 1,
+ 'attributes-list': [{'storage-shelf-info': {'shelf-modules': {'storage-shelf-module-info': {'module-id': '1', 'module-fw-revision': '1.2.3'}}}}]
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_system_cli_info(error=None):
+ ''' build xml data for system-cli info '''
+ if error is None:
+ # make it a string, to be able to compare easily
+ error = ""
+ xml = netapp_utils.zapi.NaElement('results')
+ output = "" if error == 'empty_output' else 'Download complete.'
+ data = {
+ 'cli-output': output,
+ 'cli-result-value': 1
+ }
+ xml.translate_struct(data)
+ status = "failed" if error == 'status_failed' else "passed"
+ if error != 'no_status_attr':
+ xml.add_attr('status', status)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.use_vsim = False
+
+ def set_default_args(self):
+ if self.use_vsim:
+ hostname = '10.10.10.10'
+ username = 'admin'
+ password = 'admin'
+ node = 'vsim1'
+ else:
+ hostname = 'hostname'
+ username = 'username'
+ password = 'password'
+ node = 'abc'
+ package = 'test1.zip'
+ force_disruptive_update = False
+ clear_logs = True
+ install_baseline_image = False
+ update_type = 'serial_full'
+ use_rest = 'never'
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'node': node,
+ 'package': package,
+ 'clear_logs': clear_logs,
+ 'install_baseline_image': install_baseline_image,
+ 'update_type': update_type,
+ 'https': 'true',
+ 'force_disruptive_update': force_disruptive_update,
+ 'use_rest': use_rest,
+ 'feature_flags': {'trace_apis': True}
+ })
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_sp_firmware_get_called(self):
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'service-processor'
+ set_module_args(module_args)
+ my_obj = my_module()
+ my_obj.server = self.server
+ firmware_image_get = my_obj.firmware_image_get('node')
+ print('Info: test_firmware_upgrade_get: %s' % repr(firmware_image_get))
+ assert firmware_image_get is None
+
+ def test_negative_package_and_baseline_present(self):
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'service-processor'
+ module_args['package'] = 'test1.zip'
+ module_args['install_baseline_image'] = True
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(module_args)
+ my_module()
+ msg = 'With ZAPI and firmware_type set to service-processor: do not specify both package and install_baseline_image: true.'
+ print('info: ' + exc.value.args[0]['msg'])
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_negative_package_and_baseline_absent(self):
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'service-processor'
+ module_args.pop('package')
+ module_args['install_baseline_image'] = False
+ module_args['force_disruptive_update'] = True
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(module_args)
+ my_module()
+ msg = 'With ZAPI and firmware_type set to service-processor: specify at least one of package or install_baseline_image: true.'
+ print('info: ' + exc.value.args[0]['msg'])
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_ensure_acp_firmware_update_required_called(self):
+        ''' a test to verify whether acp firmware upgrade is required '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'acp'
+ set_module_args(module_args)
+ my_obj = my_module()
+ # my_obj.server = self.server
+ my_obj.server = MockONTAPConnection(kind='acp')
+ acp_firmware_update_required = my_obj.acp_firmware_update_required()
+ print('Info: test_acp_firmware_upgrade_required_get: %s' % repr(acp_firmware_update_required))
+ assert acp_firmware_update_required is True
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.sp_firmware_image_update')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.sp_firmware_image_update_progress_get')
+ def test_ensure_apply_for_firmware_upgrade_called(self, get_mock, upgrade_mock):
+        ''' upgrading firmware and checking idempotency '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package'] = 'test1.zip'
+ module_args['firmware_type'] = 'service-processor'
+ module_args['force_disruptive_update'] = True
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert not exc.value.args[0]['changed']
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('firmware_upgrade', '3.5', 'true')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert exc.value.args[0]['changed']
+ upgrade_mock.assert_called_with()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.shelf_firmware_upgrade')
+ def test_shelf_firmware_upgrade(self, upgrade_mock):
+ ''' Test shelf firmware upgrade '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'shelf'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert not exc.value.args[0]['changed']
+ assert not upgrade_mock.called
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.shelf_firmware_upgrade')
+ def test_shelf_firmware_upgrade_force(self, upgrade_mock):
+ ''' Test shelf firmware upgrade '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'shelf'
+ module_args['force_disruptive_update'] = True
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = self.server
+ upgrade_mock.return_value = True
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert exc.value.args[0]['changed']
+ assert upgrade_mock.called
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.shelf_firmware_upgrade')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.shelf_firmware_update_required')
+ def test_shelf_firmware_upgrade_force_update_required(self, update_required_mock, upgrade_mock):
+ ''' Test shelf firmware upgrade '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'shelf'
+ module_args['force_disruptive_update'] = True
+ module_args['shelf_module_fw'] = "version"
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = self.server
+ update_required_mock.return_value = True
+ upgrade_mock.return_value = True
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert exc.value.args[0]['changed']
+ assert upgrade_mock.called
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.acp_firmware_upgrade')
+ def test_acp_firmware_upgrade(self, upgrade_mock):
+ ''' Test ACP firmware upgrade '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'acp'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert not exc.value.args[0]['changed']
+ assert not upgrade_mock.called
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.acp_firmware_upgrade')
+ def test_acp_firmware_upgrade_force(self, upgrade_mock):
+ ''' Test ACP firmware upgrade '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'acp'
+ module_args['force_disruptive_update'] = True
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection(kind='acp')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert exc.value.args[0]['changed']
+ assert upgrade_mock.called
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.disk_firmware_upgrade')
+ def test_disk_firmware_upgrade(self, upgrade_mock):
+ ''' Test disk firmware upgrade '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'disk'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert not exc.value.args[0]['changed']
+ assert not upgrade_mock.called
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.disk_firmware_upgrade')
+ def test_disk_firmware_upgrade_force(self, upgrade_mock):
+ ''' Test disk firmware upgrade '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'disk'
+ module_args['force_disruptive_update'] = True
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert exc.value.args[0]['changed']
+ assert upgrade_mock.called
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.disk_firmware_upgrade')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_firmware_upgrade.NetAppONTAPFirmwareUpgrade.disk_firmware_update_required')
+ def test_disk_firmware_upgrade_force_update_required(self, update_required_mock, upgrade_mock):
+ ''' Test disk firmware upgrade '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['firmware_type'] = 'disk'
+ module_args['force_disruptive_update'] = True
+ module_args['disk_fw'] = "version"
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = self.server
+ update_required_mock.return_value = True
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: test_firmware_upgrade_apply: %s' % repr(exc.value))
+ assert exc.value.args[0]['changed']
+ assert upgrade_mock.called
+
+ def test_acp_firmware_update_required(self):
+ ''' Test acp_firmware_update_required '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('acp')
+ result = my_obj.acp_firmware_update_required()
+ assert result
+
+ def test_acp_firmware_update_required_false(self):
+ ''' Test acp_firmware_update_required '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection()
+ result = my_obj.acp_firmware_update_required()
+ assert not result
+
+ def test_negative_acp_firmware_update_required(self):
+ ''' Test acp_firmware_update_required '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('exception')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.acp_firmware_update_required()
+ msg = "Error fetching acp firmware details details: NetApp API failed. Reason - None:None"
+ assert msg in exc.value.args[0]['msg']
+
+ def test_disk_firmware_update_required(self):
+ ''' Test disk_firmware_update_required '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['disk_fw'] = '1.2.4'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('disk_fw_info')
+ result = my_obj.disk_firmware_update_required()
+ assert result
+
+ def test_negative_disk_firmware_update_required(self):
+ ''' Test disk_firmware_update_required '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['disk_fw'] = '1.2.4'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('exception')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.disk_firmware_update_required()
+ msg = "Error fetching disk module firmware details: NetApp API failed. Reason - None:None"
+ assert msg in exc.value.args[0]['msg']
+
+ def test_shelf_firmware_update_required(self):
+ ''' Test shelf_firmware_update_required '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['shelf_module_fw'] = '1.2.4'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('shelf_fw_info')
+ result = my_obj.shelf_firmware_update_required()
+ assert result
+
+ def test_negative_shelf_firmware_update_required(self):
+ ''' Test shelf_firmware_update_required '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['shelf_module_fw'] = '1.2.4'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('exception')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.shelf_firmware_update_required()
+ msg = "Error fetching shelf module firmware details: NetApp API failed. Reason - None:None"
+ assert msg in exc.value.args[0]['msg']
+
+ def test_firmware_download(self):
+ ''' Test firmware download '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package_url'] = 'dummy_url'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('firmware_download')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ msg = "Firmware download completed. Extra info: Download complete."
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_firmware_download_60(self):
+ ''' Test firmware download when ZAPI reports error 60 (timeout) '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package_url'] = 'dummy_url'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('exception', 60, 'ZAPI timeout')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ msg = "Firmware download completed, slowly."
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_firmware_download_502(self):
+ ''' Test firmware download '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package_url'] = 'dummy_url'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('exception', 502, 'Bad GW')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ msg = "Firmware download still in progress."
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_firmware_download_502_as_error(self):
+ ''' Test firmware download '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package_url'] = 'dummy_url'
+ module_args['fail_on_502_error'] = True
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('exception', 502, 'Bad GW')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "NetApp API failed. Reason - 502:Bad GW"
+ assert msg in exc.value.args[0]['msg']
+
+ def test_firmware_download_no_num_error(self):
+ ''' Test firmware download '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package_url'] = 'dummy_url'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('exception', 'some error string', 'whatever')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "NetApp API failed. Reason - some error string:whatever"
+ assert msg in exc.value.args[0]['msg']
+
+ def test_firmware_download_no_status_attr(self):
+ ''' Test firmware download '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package_url'] = 'dummy_url'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('firmware_download', 'no_status_attr')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "unable to download package from dummy_url: 'status' attribute missing."
+ assert exc.value.args[0]['msg'].startswith(msg)
+
+ def test_firmware_download_status_failed(self):
+ ''' Test firmware download '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package_url'] = 'dummy_url'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('firmware_download', 'status_failed')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "unable to download package from dummy_url: check 'status' value."
+ assert exc.value.args[0]['msg'].startswith(msg)
+
+ def test_firmware_download_empty_output(self):
+ ''' Test firmware download '''
+ module_args = {}
+ module_args.update(self.set_default_args())
+ module_args['package_url'] = 'dummy_url'
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.use_vsim:
+ my_obj.server = MockONTAPConnection('firmware_download', 'empty_output')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "unable to download package from dummy_url: check console permissions."
+ assert exc.value.args[0]['msg'].startswith(msg)
+
+
+# REST API canned responses when mocking send_request
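+# each entry is an (http status, body, error) tuple returned by the mocked send_request, one per expected call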
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, {'num_records': 0}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'uuid_record': (200,
+ {'records': [{"uuid": '1cd8a442-86d1-11e0-ae1c-123478563412'}]}, None),
+ 'nodes_record': (200,
+ {'records': [{"name": 'node1'}, {"name": 'node2'}]}, None),
+ 'net_routes_record': (200,
+ {'records': [{"destination": {"address": "176.0.0.0",
+ "netmask": "24",
+ "family": "ipv4"},
+ "gateway": '10.193.72.1',
+ "uuid": '1cd8a442-86d1-11e0-ae1c-123478563412',
+ "svm": {"name": "test_vserver"}}]}, None),
+ 'modified_record': (200,
+ {'records': [{"destination": {"address": "0.0.0.0",
+ "netmask": "0",
+ "family": "ipv4"},
+ "gateway": "10.193.72.1",
+ "uuid": '1cd8a442-86d1-11e0-ae1c-123478563412',
+ "svm": {"name": "test_vserver"}}]}, None),
+ 'sp_state_online': (200,
+ {'service_processor': {'state': 'online'}}, None),
+ 'sp_state_rebooting': (200,
+ {'service_processor': {'state': 'rebooting'}}, None),
+ 'unexpected_arg': (400, None, 'Unexpected argument "service_processor.action"'),
+}
+
+
+def set_default_module_args(use_rest='always'):
+ hostname = 'hostname'
+ username = 'username'
+ password = 'password'
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'https': 'true',
+ 'use_rest': use_rest,
+ 'package_url': 'https://download.site.com'
+ })
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_successfully_download(mock_request, patch_ansible):
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = False
+ set_module_args(data)
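+ # side_effect entries are returned one per send_request call, in order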
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'], # post download
+ SRR['is_rest'],
+ SRR['empty_good'], # post download
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_module().apply()
+ assert exc.value.args[0]['changed']
+ print(mock_request.call_args)
+ json = {'url': 'https://download.site.com'}
+ expected = call('POST', 'cluster/software/download', None, json=json, headers=None, files=None)
+ assert mock_request.call_args == expected
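+ # rerun with server credentials and check that they are passed in the POST body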
+ data['server_username'] = 'user'
+ data['server_password'] = 'pass'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_module().apply()
+ print(mock_request.call_args)
+ json = {'url': 'https://download.site.com', 'username': 'user', 'password': 'pass'}
+ expected = call('POST', 'cluster/software/download', None, json=json, headers=None, files=None)
+ assert mock_request.call_args == expected
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_download(mock_request, patch_ansible):
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = False
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'], # post download
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module().apply()
+ msg = 'Error downloading software: calling: cluster/software/download: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_successfully_reboot_sp_and_download(mock_request, dont_sleep, patch_ansible):
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = True
+ data['node'] = 'node4'
+ data['firmware_type'] = 'service-processor'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['uuid_record'], # get UUID
+ SRR['empty_good'], # patch reboot
+ SRR['empty_good'], # post download
+ SRR['sp_state_rebooting'], # get sp state
+ SRR['sp_state_rebooting'], # get sp state
+ SRR['sp_state_online'], # get sp state
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_module().apply()
+ assert exc.value.args[0]['changed']
+
+
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_reboot_sp_and_download_bad_sp(mock_request, dont_sleep, patch_ansible):
+ """fail to read SP state"""
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = True
+ data['node'] = 'node4'
+ data['firmware_type'] = 'service-processor'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['uuid_record'], # get UUID
+ SRR['empty_good'], # patch reboot
+ SRR['empty_good'], # post download
+ SRR['sp_state_rebooting'], # get sp state
+ SRR['sp_state_rebooting'], # get sp state
+ SRR['generic_error'], # get sp state
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module().apply()
+ msg = 'Error getting node SP state:'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_reboot_sp_and_download_sp_timeout(mock_request, dont_sleep, patch_ansible):
+ """fail to read SP state"""
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = True
+ data['node'] = 'node4'
+ data['firmware_type'] = 'service-processor'
+ set_module_args(data)
+ responses = [
+ SRR['is_rest'],
+ SRR['uuid_record'], # get UUID
+ SRR['empty_good'], # patch reboot
+ SRR['empty_good'], # post download
+ ]
+ # 20 retries
+ responses.extend([SRR['sp_state_rebooting']] * 20)
+ responses.append(SRR['sp_state_online'])
+ responses.append(SRR['end_of_sequence'])
+ mock_request.side_effect = responses
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_module().apply()
+ # msg = 'Error getting node SP state:'
+ # assert msg in exc.value.args[0]['msg']
+ print('RETRIES', exc.value.args[0])
+
+
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_successfully_reboot_sp_and_download_cli(mock_request, dont_sleep, patch_ansible):
+ ''' switch back to REST CLI for reboot '''
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = True
+ data['node'] = 'node4'
+ data['firmware_type'] = 'service-processor'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['uuid_record'], # get UUID
+ SRR['unexpected_arg'], # patch reboot
+ SRR['empty_good'], # REST CLI reboot
+ SRR['empty_good'], # post download
+ SRR['sp_state_rebooting'], # get sp state
+ SRR['sp_state_rebooting'], # get sp state
+ SRR['sp_state_online'], # get sp state
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_module().apply()
+ assert exc.value.args[0]['changed']
+
+
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_reboot_sp_and_download_cli(mock_request, dont_sleep, patch_ansible):
+ ''' switch back to REST CLI for reboot '''
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = True
+ data['node'] = 'node4'
+ data['firmware_type'] = 'service-processor'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['uuid_record'], # get UUID
+ SRR['unexpected_arg'], # patch reboot
+ SRR['generic_error'], # REST CLI reboot
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module().apply()
+ msg = 'Error rebooting node SP: reboot_sp requires ONTAP 9.10.1 or newer, falling back to CLI passthrough failed'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_reboot_sp_and_download_uuid_error(mock_request, patch_ansible):
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = True
+ data['node'] = 'node4'
+ data['firmware_type'] = 'service-processor'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'], # get UUID
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module().apply()
+ msg = 'Error reading node UUID: calling: cluster/nodes: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_reboot_sp_and_download_node_not_found(mock_request, patch_ansible):
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = True
+ data['node'] = 'node4'
+ data['firmware_type'] = 'service-processor'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'], # get UUID
+ SRR['nodes_record'], # get nodes
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module().apply()
+ msg = 'Error: node not found node4, current nodes: node1, node2.'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_reboot_sp_and_download_nodes_get_error(mock_request, patch_ansible):
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['reboot_sp'] = True
+ data['node'] = 'node4'
+ data['firmware_type'] = 'service-processor'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'], # get UUID
+ SRR['generic_error'], # get nodes
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module().apply()
+ msg = 'Error reading nodes: calling: cluster/nodes: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_unsupported_option_with_rest(mock_request, patch_ansible):
+ data = set_default_module_args(use_rest='always')
+ data['state'] = 'present'
+ data['clear_logs'] = False
+ data['node'] = 'node4'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module().apply()
+ msg = "REST API currently does not support 'clear_logs'"
+ assert msg in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_flexcache.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_flexcache.py
new file mode 100644
index 000000000..07e01940a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_flexcache.py
@@ -0,0 +1,838 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP FlexCache Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, build_zapi_error, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_warning_was_raised, call_main, create_module, expect_and_capture_ansible_exception, patch_ansible, print_warnings
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_flexcache import NetAppONTAPFlexCache as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+flexcache_info = {
+ 'vserver': 'vserver',
+ 'origin-vserver': 'ovserver',
+ 'origin-volume': 'ovolume',
+ 'origin-cluster': 'ocluster',
+ 'volume': 'flexcache_volume',
+}
+
+flexcache_get_info = {
+ 'attributes-list': [{
+ 'flexcache-info': flexcache_info
+ }]
+}
+
+flexcache_get_info_double = {
+ 'attributes-list': [
+ {
+ 'flexcache-info': flexcache_info
+ },
+ {
+ 'flexcache-info': flexcache_info
+ }
+ ]
+}
+
+
+def results_info(status):
+ return {
+ 'result-status': status,
+ 'result-jobid': 'job12345',
+ }
+
+
+def job_info(state, error):
+ return {
+ 'num-records': 1,
+ 'attributes': {
+ 'job-info': {
+ 'job-state': state,
+ 'job-progress': 'progress',
+ 'job-completion': error,
+ }
+ }
+ }
+
+
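+# canned ZAPI responses: build_zapi_response wraps a dict (with an optional record count) into a ZAPI reply,
+# build_zapi_error builds an error with the given code and message; zapi_responses also provides common
+# entries such as 'no_records', 'success', 'error' and 'cserver' used below.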
+ZRR = zapi_responses({
+ 'flexcache_get_info': build_zapi_response(flexcache_get_info, 1),
+ 'flexcache_get_info_double': build_zapi_response(flexcache_get_info_double, 2),
+ 'job_running': build_zapi_response(job_info('running', None)),
+ 'job_success': build_zapi_response(job_info('success', None)),
+ 'job_error': build_zapi_response(job_info('failure', 'failure')),
+ 'job_error_no_completion': build_zapi_response(job_info('failure', None)),
+ 'job_other': build_zapi_response(job_info('other', 'other')),
+ 'result_async': build_zapi_response(results_info('in_progress')),
+ 'result_error': build_zapi_response(results_info('whatever')),
+ 'error_160': build_zapi_error(160, 'Volume volume on Vserver ansibleSVM must be unmounted before being taken offline or restricted'),
+ 'error_13001': build_zapi_error(13001, 'Volume volume in Vserver ansibleSVM must be offline to be deleted'),
+ 'error_15661': build_zapi_error(15661, 'Job not found'),
+ 'error_size': build_zapi_error('size', 'Size "50MB" ("52428800B") is too small. Minimum size is "80MB" ("83886080B")'),
+})
+
+
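+# arguments common to every test below; each test layers its own module_args on top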
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'volume': 'flexcache_volume',
+ 'vserver': 'vserver',
+}
+
+
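+# register_responses() queues (protocol, api, canned response) tuples; the patched ZAPI and REST
+# transports return them in order as the module under test issues calls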
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never'
+ }
+ error = 'missing required arguments:'
+ assert error in call_main(my_main, {}, module_args, fail=True)['msg']
+
+
+def test_missing_parameters():
+ ''' fail if origin_volume and origin_vserver are missing '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ error = 'Missing parameters:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_missing_parameter():
+ ''' fail if the origin_vserver parameter is missing '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'origin_volume': 'origin_volume',
+ }
+ error = 'Missing parameter: origin_vserver'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_get_flexcache():
+ ''' get flexcache info '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'origin_volume': 'origin_volume',
+ 'origin_cluster': 'origin_cluster',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ info = my_obj.flexcache_get()
+ assert info
+ assert 'origin_cluster' in info
+
+
+def test_get_flexcache_double():
+ ''' get flexcache info returns 2 entries! '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info_double']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'origin_volume': 'origin_volume',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = 'Error fetching FlexCache info: Multiple records found for %s:' % DEFAULT_ARGS['volume']
+ assert error in expect_and_capture_ansible_exception(my_obj.flexcache_get, 'fail')['msg']
+
+
+def test_create_flexcache():
+ ''' create flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'size': '90', # 80MB minimum
+ 'size_unit': 'mb', # 80MB minimum
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_flexcache_no_wait():
+ ''' create flexcache without waiting for job completion (time_out 0) '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['result_async']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'size': '90', # 80MB minimum
+ 'size_unit': 'mb', # 80MB minimum
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'time_out': 0
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_create_flexcache():
+ ''' create flexcache error paths: bad result status, job info error, job failure '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['result_error']),
+ # 2nd run
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['error']),
+ # 3rd run
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['job_error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'size': '90', # 80MB minimum
+ 'size_unit': 'mb', # 80MB minimum
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ }
+ error = 'Unexpected error when creating flexcache: results is:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ error = zapi_error_message('Error fetching job info')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ error = 'Error when creating flexcache'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_create_flexcache_idempotent():
+ ''' create flexcache - already exists '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'ovolume',
+ 'origin_vserver': 'ovserver',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_flexcache_autoprovision():
+ ''' create flexcache with autoprovision'''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'size': '90', # 80MB minimum
+ 'size_unit': 'mb', # 80MB minimum
+ 'auto_provision_as': 'flexgroup',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_flexcache_autoprovision_idempotent():
+ ''' create flexcache with autoprovision - already exists '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'origin_volume': 'ovolume',
+ 'origin_vserver': 'ovserver',
+ 'auto_provision_as': 'flexgroup',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_flexcache_multiplier():
+ ''' create flexcache with aggregate multiplier'''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'size': '90', # 80MB minimum
+ 'size_unit': 'mb', # 80MB minimum
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'aggr_list_multiplier': 2,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_flexcache_multiplier_idempotent():
+ ''' create flexcache with aggregate multiplier - already exists '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'ovolume',
+ 'origin_vserver': 'ovserver',
+ 'aggr_list_multiplier': 2,
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_flexcache_exists_no_force():
+ ''' delete flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ('ZAPI', 'flexcache-destroy-async', ZRR['error_13001']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'state': 'absent'
+ }
+ error = zapi_error_message('Error deleting FlexCache', 13001, 'Volume volume in Vserver ansibleSVM must be offline to be deleted')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_delete_flexcache_exists_with_force():
+ ''' delete flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ('ZAPI', 'volume-offline', ZRR['success']),
+ ('ZAPI', 'flexcache-destroy-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'force_offline': 'true',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_flexcache_exists_with_force_no_wait():
+ ''' delete flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ('ZAPI', 'volume-offline', ZRR['success']),
+ ('ZAPI', 'flexcache-destroy-async', ZRR['result_async']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'force_offline': 'true',
+ 'time_out': 0,
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_flexcache_exists_junctionpath_no_force():
+ ''' delete flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ('ZAPI', 'volume-offline', ZRR['success']),
+ ('ZAPI', 'flexcache-destroy-async', ZRR['error_160']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'force_offline': 'true',
+ 'junction_path': 'jpath',
+ 'state': 'absent'
+ }
+ error = zapi_error_message('Error deleting FlexCache', 160,
+ 'Volume volume on Vserver ansibleSVM must be unmounted before being taken offline or restricted')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_delete_flexcache_exists_junctionpath_with_force():
+ ''' delete flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ('ZAPI', 'volume-unmount', ZRR['success']),
+ ('ZAPI', 'volume-offline', ZRR['success']),
+ ('ZAPI', 'flexcache-destroy-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'force_offline': 'true',
+ 'junction_path': 'jpath',
+ 'force_unmount': 'true',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_flexcache_not_exist():
+ ''' delete flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_delete_flexcache_exists_with_force():
+ ''' delete flexcache error paths: bad result status, job info error, job failure '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ('ZAPI', 'volume-offline', ZRR['success']),
+ ('ZAPI', 'flexcache-destroy-async', ZRR['result_error']),
+ # 2nd run
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ('ZAPI', 'volume-offline', ZRR['success']),
+ ('ZAPI', 'flexcache-destroy-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['error']),
+ # 3rd run
+ ('ZAPI', 'flexcache-get-iter', ZRR['flexcache_get_info']),
+ ('ZAPI', 'volume-offline', ZRR['success']),
+ ('ZAPI', 'flexcache-destroy-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['job_error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'force_offline': 'true',
+ 'state': 'absent'
+ }
+ error = 'Unexpected error when deleting flexcache: results is:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ error = zapi_error_message('Error fetching job info')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ error = 'Error when deleting flexcache'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_create_flexcache_size_error():
+ ''' create flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['error_size']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'size': '50', # below the 80MB minimum, the create is expected to fail
+ 'size_unit': 'mb',
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ }
+ error = zapi_error_message('Error creating FlexCache', 'size', 'Size "50MB" ("52428800B") is too small. Minimum size is "80MB" ("83886080B")')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_create_flexcache_time_out(dont_sleep):
+ ''' create flexcache '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['no_records']),
+ ('ZAPI', 'flexcache-create-async', ZRR['result_async']),
+ ('ZAPI', 'job-get', ZRR['job_running']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'size': '50', # 80MB minimum
+ 'size_unit': 'mb', # 80MB minimum
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'time_out': '2',
+ }
+ error = 'Error when creating flexcache: job completion exceeded expected timer of: 2 seconds'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_error_zapi():
+ ''' error in ZAPI calls '''
+ register_responses([
+ ('ZAPI', 'flexcache-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-offline', ZRR['error']),
+ ('ZAPI', 'volume-unmount', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = zapi_error_message('Error fetching FlexCache info')
+ assert error in expect_and_capture_ansible_exception(my_obj.flexcache_get, 'fail')['msg']
+ error = zapi_error_message('Error offlining FlexCache volume')
+ assert error in expect_and_capture_ansible_exception(my_obj.volume_offline, 'fail', None)['msg']
+ error = zapi_error_message('Error unmounting FlexCache volume')
+ assert error in expect_and_capture_ansible_exception(my_obj.volume_unmount, 'fail', None)['msg']
+
+
+def test_check_job_status():
+ ''' check_job_status '''
+ register_responses([
+ # job not found
+ ('ZAPI', 'job-get', ZRR['error_15661']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ('ZAPI', 'job-get', ZRR['error_15661']),
+ # cserver job not found
+ ('ZAPI', 'job-get', ZRR['error_15661']),
+ ('ZAPI', 'vserver-get-iter', ZRR['cserver']),
+ ('ZAPI', 'job-get', ZRR['error_15661']),
+ # missing job-completion
+ ('ZAPI', 'job-get', ZRR['job_error_no_completion']),
+ # bad status
+ ('ZAPI', 'job-get', ZRR['job_other']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ # error = zapi_error_message('Error fetching FlexCache info')
+ error = 'cannot locate job with id: 1'
+ assert error in my_obj.check_job_status('1')
+ assert error in my_obj.check_job_status('1')
+ assert 'progress' in my_obj.check_job_status('1')
+ error = 'Unexpected job status in:'
+ assert error in expect_and_capture_ansible_exception(my_obj.check_job_status, 'fail', '1')['msg']
+
+
+# REST API canned responses when mocking send_request
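+# rest_responses() also supplies common defaults, e.g. 'success' and the version-specific 'is_rest_9_x' entries used below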
+SRR = rest_responses({
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'one_flexcache_record': (200, dict(records=[
+ dict(uuid='a1b2c3',
+ name='flexcache_volume',
+ svm=dict(name='vserver'),
+ )
+ ], num_records=1), None),
+ 'one_flexcache_record_with_path': (200, dict(records=[
+ dict(uuid='a1b2c3',
+ name='flexcache_volume',
+ svm=dict(name='vserver'),
+ path='path'
+ )
+ ], num_records=1), None),
+})
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ module_args = {
+ "use_rest": "never"
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_missing_arguments():
+ ''' create flexcache '''
+ register_responses([
+
+ ])
+ args = dict(DEFAULT_ARGS)
+ del args['hostname']
+ module_args = {
+ 'use_rest': 'always',
+ }
+ error = 'missing required arguments: hostname'
+ assert error in call_main(my_main, args, module_args, fail=True)['msg']
+
+
+def test_rest_create():
+ ''' create flexcache '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['zero_record']),
+ ('POST', 'storage/flexcache/flexcaches', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'size': '50', # 80MB minimum
+ 'size_unit': 'mb', # 80MB minimum
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'origin_cluster': 'ocluster',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_create_no_action():
+ ''' create flexcache idempotent '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_delete_no_action():
+ ''' delete flexcache '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['zero_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_delete():
+ ''' delete flexcache '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ('DELETE', 'storage/flexcache/flexcaches/a1b2c3', SRR['empty_good']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_delete_with_force():
+ ''' delete flexcache, since there is no path, unmount is not called '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ('DELETE', 'storage/flexcache/flexcaches/a1b2c3', SRR['empty_good']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'force_unmount': True,
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_delete_with_force_and_path():
+ ''' delete flexcache with unmount '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record_with_path']),
+ ('PATCH', 'storage/volumes/a1b2c3', SRR['empty_good']),
+ ('DELETE', 'storage/flexcache/flexcaches/a1b2c3', SRR['empty_good']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'force_unmount': True,
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_delete_with_force2_and_path():
+ ''' delete flexcache with unmount and offline'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record_with_path']),
+ ('PATCH', 'storage/volumes/a1b2c3', SRR['empty_good']),
+ ('PATCH', 'storage/volumes/a1b2c3', SRR['empty_good']),
+ ('DELETE', 'storage/flexcache/flexcaches/a1b2c3', SRR['empty_good']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'force_offline': True,
+ 'force_unmount': True,
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_modify_prepopulate_no_action():
+ ''' modify flexcache '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'prepopulate': {
+ 'dir_paths': ['/'],
+ 'force_prepopulate_if_already_created': False
+ }
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_modify_prepopulate():
+ ''' modify flexcache '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ('PATCH', 'storage/flexcache/flexcaches/a1b2c3', SRR['empty_good']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'prepopulate': {
+ 'dir_paths': ['/'],
+ 'force_prepopulate_if_already_created': True
+ }
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_modify_prepopulate_default():
+ ''' modify flexcache '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ('PATCH', 'storage/flexcache/flexcaches/a1b2c3', SRR['empty_good']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'prepopulate': {
+ 'dir_paths': ['/'],
+ }
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_modify_prepopulate_and_mount():
+ ''' modify flexcache '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ('PATCH', 'storage/volumes/a1b2c3', SRR['empty_good']),
+ ('PATCH', 'storage/flexcache/flexcaches/a1b2c3', SRR['empty_good']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'aggr_list': 'aggr1',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'prepopulate': {
+ 'dir_paths': ['/'],
+ },
+ 'path': '/mount_path'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_modify():
+ ''' modifying existing FlexCache properties is rejected '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'aggr_list': 'aggr1',
+ 'volume': 'flexcache_volume2',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ }
+ error = 'FlexCache properties cannot be modified by this module. modify:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_warn_prepopulate():
+ ''' prepopulate with an empty junction_path triggers warnings '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/flexcache/flexcaches', SRR['one_flexcache_record']),
+ ('PATCH', 'storage/volumes/a1b2c3', SRR['success']),
+ ('PATCH', 'storage/flexcache/flexcaches/a1b2c3', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'aggr_list': 'aggr1',
+ 'volume': 'flexcache_volume',
+ 'origin_volume': 'fc_vol_origin',
+ 'origin_vserver': 'ansibleSVM',
+ 'prepopulate': {
+ 'dir_paths': ['/'],
+ 'force_prepopulate_if_already_created': True
+ },
+ 'junction_path': ''
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised('na_ontap_flexcache is not idempotent when prepopulate is present and force_prepopulate_if_already_created=true')
+ assert_warning_was_raised('prepopulate requires the FlexCache volume to be mounted')
+
+
+def test_error_missing_uuid():
+ module_args = {
+ 'use_rest': 'akway',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ current = {}
+ error_template = 'Error in %s: Error, no uuid in current: {}'
+ error = error_template % 'rest_offline_volume'
+ assert error in expect_and_capture_ansible_exception(my_obj.rest_offline_volume, 'fail', current)['msg']
+ error = error_template % 'rest_mount_volume'
+ assert error in expect_and_capture_ansible_exception(my_obj.rest_mount_volume, 'fail', current, 'path')['msg']
+ error = error_template % 'flexcache_rest_delete'
+ assert error in expect_and_capture_ansible_exception(my_obj.flexcache_rest_delete, 'fail', current)['msg']
+
+
+def test_prepopulate_option_checks():
+ ''' prepopulate requires ONTAP 9.8, exclude_dir_paths requires ONTAP 9.9 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'prepopulate': {
+ 'dir_paths': ['/'],
+ 'force_prepopulate_if_already_created': True,
+ 'exclude_dir_paths': ['/']
+ },
+ }
+ error = 'Error: using prepopulate requires ONTAP 9.8 or later and REST must be enabled - ONTAP version: 9.7.0.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ error = 'Error: using prepopulate: exclude_dir_paths requires ONTAP 9.9 or later and REST must be enabled - ONTAP version: 9.8.0.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_event.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_event.py
new file mode 100644
index 000000000..a679f9ded
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_event.py
@@ -0,0 +1,338 @@
+''' unit tests for ONTAP Ansible module: na_ontap_fpolicy_event '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_event \
+ import NetAppOntapFpolicyEvent as fpolicy_event_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
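+# each entry is an (http status, body, error) tuple returned by the mocked send_request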
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {"num_records": 0}, None),
+ 'end_of_sequence': (500, None, "Oops, the UT needs one more SRR response"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'fpolicy_event_record': (200, {
+ "num_records": 1,
+ "records": [{
+ 'svm': {'uuid': '3b21372b-64ae-11eb-8c0e-0050568176ec'},
+ 'name': 'my_event2',
+ 'volume_monitoring': False
+ }]
+ }, None),
+ 'vserver_uuid_record': (200, {
+ 'records': [{
+ 'uuid': '3b21372b-64ae-11eb-8c0e-0050568176ec'
+ }]
+ }, None)
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'fpolicy_event':
+ xml = self.build_fpolicy_event_info()
+ elif self.type == 'fpolicy_event_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_fpolicy_event_info():
+ ''' build xml data for fpolicy-event-options-config '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': {
+ 'fpolicy-event-options-config': {
+ "event-name": "my_event2",
+ "vserver": "svm1",
+ 'volume-operation': "false"
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
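+ # onbox=True would target a real ONTAP system; leaving it False keeps every call on MockONTAPConnection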
+
+ def set_default_args(self, use_rest=None):
+ hostname = '10.10.10.10'
+ username = 'username'
+ password = 'password'
+ vserver = 'svm1'
+ name = 'my_event2'
+ volume_monitoring = False
+
+ args = dict({
+ 'state': 'present',
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'vserver': vserver,
+ 'name': name,
+ 'volume_monitoring': volume_monitoring
+ })
+
+ if use_rest is not None:
+ args['use_rest'] = use_rest
+
+ return args
+
+ @staticmethod
+ def get_fpolicy_event_mock_object(cx_type='zapi', kind=None):
+ fpolicy_event_obj = fpolicy_event_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ fpolicy_event_obj.server = MockONTAPConnection()
+ else:
+ fpolicy_event_obj.server = MockONTAPConnection(kind=kind)
+ return fpolicy_event_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ fpolicy_event_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called(self):
+ ''' test get_fpolicy_event for non-existent config'''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ print('starting')
+ my_obj = fpolicy_event_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = self.server
+ assert my_obj.get_fpolicy_event is not None
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_fpolicy_event_config for existing config'''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = fpolicy_event_module()
+ my_obj.server = MockONTAPConnection(kind='fpolicy_event')
+ assert my_obj.get_fpolicy_event()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_event.NetAppOntapFpolicyEvent.create_fpolicy_event')
+ def test_successful_create(self, create_fpolicy_event):
+ ''' creating fpolicy_event and test idempotency '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = fpolicy_event_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ create_fpolicy_event.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = fpolicy_event_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_event')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_event.NetAppOntapFpolicyEvent.delete_fpolicy_event')
+ def test_successful_delete(self, delete_fpolicy_event):
+ ''' delete fpolicy_event and test idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = fpolicy_event_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_event')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = fpolicy_event_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_event.NetAppOntapFpolicyEvent.modify_fpolicy_event')
+ def test_successful_modify(self, modify_fpolicy_event):
+ ''' modifying fpolicy_event config and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['volume_monitoring'] = True
+ set_module_args(data)
+ my_obj = fpolicy_event_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_event')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # to reset na_helper from remembering the previous 'changed' value
+ data['volume_monitoring'] = False
+ set_module_args(data)
+ my_obj = fpolicy_event_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_event')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ data = self.set_default_args(use_rest='Never')
+ set_module_args(data)
+ my_obj = fpolicy_event_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_event_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_fpolicy_event()
+ assert 'Error creating fPolicy policy event ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_fpolicy_event()
+ assert 'Error deleting fPolicy policy event ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_fpolicy_event(modify={})
+ assert 'Error modifying fPolicy policy event ' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
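+ # responses: version detection, then the first REST call fails with 'Expected error'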
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_fpolicy_event_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['msg'] == SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_create_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
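+ # responses: version detection, vserver UUID lookup, GET finds no event, POST creates it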
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['vserver_uuid_record'],
+ SRR['empty_good'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_event_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_create_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['vserver_uuid_record'],
+ SRR['fpolicy_event_record'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_event_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_delete_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['vserver_uuid_record'],
+ SRR['fpolicy_event_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_event_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_delete_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['vserver_uuid_record'],
+ SRR['empty_good'], # get
+ SRR['empty_good'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_event_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'present'
+ data['volume_monitoring'] = True
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['vserver_uuid_record'],
+ SRR['fpolicy_event_record'], # get
+ SRR['empty_good'], # no response for modify
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_event_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'present'
+ data['volume_monitoring'] = False
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['vserver_uuid_record'],
+ SRR['fpolicy_event_record'], # get
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_event_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
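A note on the pattern used by all the REST tests in these files: each SRR entry is a canned (status_code, json_body, error_message) tuple, and assigning a list of them to mock_request.side_effect makes the patched send_request return them one per call, in order, with 'end_of_sequence' acting as a tripwire for any extra call. A minimal standalone sketch of that mechanism, using plain unittest.mock rather than the collection code (the endpoint strings are placeholders):

    # side_effect consumes one canned (status_code, body, error) tuple per call, in order
    from unittest.mock import Mock

    SRR = {
        'is_rest': (200, {'version': {'generation': 9}}, None),
        'empty_good': (200, {}, None),
        'end_of_sequence': (500, None, 'Unexpected call to send_request'),
    }
    send_request = Mock(side_effect=[SRR['is_rest'], SRR['empty_good'], SRR['end_of_sequence']])

    assert send_request('GET', 'cluster')[0] == 200           # first canned response
    assert send_request('POST', 'fpolicy/events')[1] == {}    # second canned response
    # a third call would return the 500 guard, making an unexpected request obvious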
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_ext_engine.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_ext_engine.py
new file mode 100644
index 000000000..c2304876c
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_ext_engine.py
@@ -0,0 +1,395 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP fpolicy ext engine Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_ext_engine \
+ import NetAppOntapFpolicyExtEngine as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection():
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'fpolicy_ext_engine':
+ xml = self.build_fpolicy_ext_engine_info()
+ elif self.type == 'fpolicy_ext_engine_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_fpolicy_ext_engine_info():
+ ''' build xml data for fpolicy-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': {
+ 'fpolicy-external-engine-info': {
+ 'vserver': 'svm1',
+ 'engine-name': 'engine1',
+ 'primary-servers': [
+ {'ip-address': '10.11.12.13'}
+ ],
+ 'port-number': '8787',
+ 'extern-engine-type': 'asynchronous',
+ 'ssl-option': 'no_auth'
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+def default_args():
+ args = {
+ 'vserver': 'svm1',
+ 'name': 'engine1',
+ 'primary_servers': '10.11.12.13',
+ 'port': 8787,
+ 'extern_engine_type': 'asynchronous',
+ 'ssl_option': 'no_auth',
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'one_fpolicy_ext_engine_record': (200, {
+ "records": [{
+ 'engine-name': 'engine1',
+ 'vserver': 'svm1',
+ 'primary-servers': ['10.11.12.13'],
+ 'port': 8787,
+ 'extern-engine-type': 'asynchronous',
+ 'ssl-option': 'no-auth'
+ }],
+ 'num_records': 1
+ }, None)
+
+}
+
+
+def get_fpolicy_ext_engine_mock_object(cx_type='zapi', kind=None):
+ fpolicy_ext_engine_obj = my_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ fpolicy_ext_engine_obj.server = MockONTAPConnection()
+ else:
+ fpolicy_ext_engine_obj.server = MockONTAPConnection(kind=kind)
+ return fpolicy_ext_engine_obj
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_ensure_get_called(patch_ansible):
+ ''' test get_fpolicy_ext_engine for non-existent engine'''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ print('starting')
+ my_obj = my_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = MockONTAPConnection()
+ assert my_obj.get_fpolicy_ext_engine is not None
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+    ''' missing required arguments are reported as errors '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_ext_engine.NetAppOntapFpolicyExtEngine.create_fpolicy_ext_engine')
+def test_successful_create(self, patch_ansible):
+ ''' creating fpolicy_ext_engine and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection()
+ with patch.object(my_module, 'create_fpolicy_ext_engine', wraps=my_obj.create_fpolicy_ext_engine) as mock_create:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_create.assert_called_with()
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_ext_engine')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_ext_engine.NetAppOntapFpolicyExtEngine.delete_fpolicy_ext_engine')
+def test_successful_delete(self, patch_ansible):
+ ''' delete fpolicy_ext_engine and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['state'] = 'absent'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_ext_engine')
+ with patch.object(my_module, 'delete_fpolicy_ext_engine', wraps=my_obj.delete_fpolicy_ext_engine) as mock_delete:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Delete: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_delete.assert_called_with()
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['state'] = 'absent'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Delete: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_ext_engine.NetAppOntapFpolicyExtEngine.modify_fpolicy_ext_engine')
+def test_successful_modify(self, patch_ansible):
+ ''' modifying fpolicy_ext_engine and testing idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['port'] = '9999'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_ext_engine')
+ with patch.object(my_module, 'modify_fpolicy_ext_engine', wraps=my_obj.modify_fpolicy_ext_engine) as mock_modify:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Modify: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_modify.assert_called_with({'port': 9999})
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_ext_engine')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Modify: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+def test_if_all_methods_catch_exception(patch_ansible):
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_ext_engine_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_fpolicy_ext_engine()
+ assert 'Error creating fPolicy external engine ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_fpolicy_ext_engine()
+ assert 'Error deleting fPolicy external engine ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_fpolicy_ext_engine(modify={})
+ assert 'Error modifying fPolicy external engine ' in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' create fpolicy ext engine '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' create fpolicy ext engine idempotent '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_ext_engine_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_delete_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' delete fpolicy ext engine '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_delete(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' delete fpolicy ext engine '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_ext_engine_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_modify_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' modify fpolicy ext engine '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_ext_engine_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_modify_prepopulate(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' modify fpolicy ext engine '''
+ args = dict(default_args())
+ args['port'] = 9999
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_ext_engine_record'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_delete_no_record(mock_request, patch_ansible):      # pylint: disable=redefined-outer-name,unused-argument
+    ''' delete fpolicy ext engine is a no-op when the GET returns an empty body '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_delete_with_record(mock_request, patch_ansible):      # pylint: disable=redefined-outer-name,unused-argument
+    ''' delete fpolicy ext engine when the engine exists '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_ext_engine_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
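The REST tests above also assert an exact call count with len(mock_request.mock_calls), so an extra or missing API round-trip fails the test even when the final 'changed' value looks right. A tiny standalone illustration of what mock_calls records (plain unittest.mock; the paths are placeholders):

    # every invocation of the mock is recorded, so the length equals the number of round-trips
    from unittest.mock import Mock

    mock_request = Mock(side_effect=[(200, {}, None), (200, {}, None)])
    mock_request('GET', 'cluster')
    mock_request('GET', 'protocols/fpolicy/engines')
    assert len(mock_request.mock_calls) == 2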
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_policy.py
new file mode 100644
index 000000000..fe065af33
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_policy.py
@@ -0,0 +1,339 @@
+''' unit tests ONTAP Ansible module: na_ontap_fpolicy_policy '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_policy \
+ import NetAppOntapFpolicyPolicy as fpolicy_policy_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+    'end_of_sequence': (500, None, "Oops, the UT needs one more SRR response"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'fpolicy_policy_record': (200, {
+ "records": [{
+ "vserver": "svm1",
+ "policy_name": "policy1",
+ "events": ['my_event'],
+ "engine": "native",
+ "is_mandatory": False,
+ "allow_privileged_access": False,
+ "is_passthrough_read_enabled": False
+ }]
+ }, None)
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'fpolicy_policy':
+ xml = self.build_fpolicy_policy_info()
+ elif self.type == 'fpolicy_policy_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_fpolicy_policy_info():
+ ''' build xml data for fpolicy-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': {
+ 'fpolicy-policy-info': {
+ "vserver": "svm1",
+ "policy-name": "policy1",
+ "events": [
+ {'event-name': 'my_event'}
+ ],
+ "engine-name": "native",
+ "is-mandatory": "False",
+ "allow-privileged-access": "False",
+ "is-passthrough-read-enabled": "False"
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
+
+ def set_default_args(self, use_rest=None):
+        hostname = '10.10.10.10'
+        username = 'username'
+        password = 'password'
+        vserver = 'svm1'
+        name = 'policy1'
+        events = 'my_event'
+        is_mandatory = False
+
+ args = dict({
+ 'state': 'present',
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'vserver': vserver,
+ 'name': name,
+ 'events': events,
+ 'is_mandatory': is_mandatory
+ })
+
+ if use_rest is not None:
+ args['use_rest'] = use_rest
+
+ return args
+
+ @staticmethod
+ def get_fpolicy_policy_mock_object(cx_type='zapi', kind=None):
+ fpolicy_policy_obj = fpolicy_policy_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ fpolicy_policy_obj.server = MockONTAPConnection()
+ else:
+ fpolicy_policy_obj.server = MockONTAPConnection(kind=kind)
+ return fpolicy_policy_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ fpolicy_policy_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called(self):
+ ''' test get_fpolicy_policy for non-existent config'''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ print('starting')
+ my_obj = fpolicy_policy_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = self.server
+ assert my_obj.get_fpolicy_policy is not None
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_fpolicy_policy_config for existing config'''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = fpolicy_policy_module()
+ my_obj.server = MockONTAPConnection(kind='fpolicy_policy')
+ assert my_obj.get_fpolicy_policy()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_policy.NetAppOntapFpolicyPolicy.create_fpolicy_policy')
+ def test_successful_create(self, create_fpolicy_policy):
+ ''' creating fpolicy_policy and test idempotency '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = fpolicy_policy_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ create_fpolicy_policy.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = fpolicy_policy_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_policy.NetAppOntapFpolicyPolicy.delete_fpolicy_policy')
+ def test_successful_delete(self, delete_fpolicy_policy):
+ ''' delete fpolicy_policy and test idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = fpolicy_policy_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = fpolicy_policy_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_policy.NetAppOntapFpolicyPolicy.modify_fpolicy_policy')
+ def test_successful_modify(self, modify_fpolicy_policy):
+ ''' modifying fpolicy_policy config and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['is_mandatory'] = True
+ set_module_args(data)
+ my_obj = fpolicy_policy_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # to reset na_helper from remembering the previous 'changed' value
+ data['is_mandatory'] = False
+ set_module_args(data)
+ my_obj = fpolicy_policy_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ data = self.set_default_args(use_rest='Never')
+ set_module_args(data)
+ my_obj = fpolicy_policy_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('fpolicy_policy_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_fpolicy_policy()
+ assert 'Error creating fPolicy policy ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_fpolicy_policy()
+ assert 'Error deleting fPolicy policy ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_fpolicy_policy(modify={})
+ assert 'Error modifying fPolicy policy ' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_fpolicy_policy_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['msg'] == SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_create_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_policy_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_create_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['fpolicy_policy_record'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_policy_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_delete_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['fpolicy_policy_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_policy_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_delete_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_policy_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'present'
+        data['is_mandatory'] = True
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['fpolicy_policy_record'], # get
+ SRR['empty_good'], # no response for modify
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_policy_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'present'
+ data['is_mandatory'] = False
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['fpolicy_policy_record'], # get
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_fpolicy_policy_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
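The class-based tests above depend on the patched exit paths provided by ansible_mocks: exit_json and fail_json raise AnsibleExitJson and AnsibleFailJson instead of terminating the process, so pytest.raises can capture the result dict through exc.value.args[0]. A self-contained sketch of that idea with simplified stand-ins (not the actual ansible_mocks implementation):

    import pytest

    class AnsibleExitJson(Exception):
        '''raised instead of exiting so the result dict stays inspectable'''

    def exit_json(**kwargs):
        # stand-in for the patched AnsibleModule.exit_json
        raise AnsibleExitJson(kwargs)

    def apply():
        # stand-in for a module's apply() that ends by reporting its result
        exit_json(changed=True)

    def test_apply_reports_changed():
        with pytest.raises(AnsibleExitJson) as exc:
            apply()
        assert exc.value.args[0]['changed']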
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_scope.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_scope.py
new file mode 100644
index 000000000..b09ab26ae
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_scope.py
@@ -0,0 +1,351 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP fpolicy scope Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_scope \
+ import NetAppOntapFpolicyScope as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection():
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'fpolicy_scope':
+ xml = self.build_fpolicy_scope_info()
+ elif self.type == 'fpolicy_scope_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_fpolicy_scope_info():
+ ''' build xml data for fpolicy-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': {
+ 'fpolicy-scope-config': {
+ 'vserver': 'svm1',
+ 'policy-name': 'policy1',
+ 'export-policies-to-exclude': [
+ {'string': 'export1'}
+ ],
+ 'is-file-extension-check-on-directories-enabled': True,
+ 'is-monitoring-of-objects-with-no-extension-enabled': False
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+def default_args():
+ args = {
+ 'vserver': 'svm1',
+ 'name': 'policy1',
+ 'export_policies_to_exclude': 'export1',
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'one_fpolicy_scope_record': (200, {
+ "records": [{
+ 'vserver': 'svm1',
+ 'policy_name': 'policy1',
+ 'export_policies_to_exclude': ['export1'],
+ 'is_file_extension_check_on_directories_enabled': True,
+ 'is_monitoring_of_objects_with_no_extension_enabled': False
+ }],
+ 'num_records': 1
+ }, None)
+}
+
+
+def get_fpolicy_scope_mock_object(cx_type='zapi', kind=None):
+ fpolicy_scope_obj = my_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ fpolicy_scope_obj.server = MockONTAPConnection()
+ else:
+ fpolicy_scope_obj.server = MockONTAPConnection(kind=kind)
+ return fpolicy_scope_obj
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_ensure_get_called(patch_ansible):
+ ''' test get_fpolicy_scope for non-existent policy'''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ print('starting')
+ my_obj = my_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = MockONTAPConnection()
+ assert my_obj.get_fpolicy_scope is not None
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+    ''' missing required arguments are reported as errors '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_scope.NetAppOntapFpolicyScope.create_fpolicy_scope')
+def test_successful_create(self, patch_ansible):
+ ''' creating fpolicy_scope and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection()
+ with patch.object(my_module, 'create_fpolicy_scope', wraps=my_obj.create_fpolicy_scope) as mock_create:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_create.assert_called_with()
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_scope')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_scope.NetAppOntapFpolicyScope.delete_fpolicy_scope')
+def test_successful_delete(self, patch_ansible):
+ ''' delete fpolicy_scope and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['state'] = 'absent'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_scope')
+ with patch.object(my_module, 'delete_fpolicy_scope', wraps=my_obj.delete_fpolicy_scope) as mock_delete:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Delete: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_delete.assert_called_with()
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['state'] = 'absent'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Delete: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_scope.NetAppOntapFpolicyScope.modify_fpolicy_scope')
+def test_successful_modify(self, patch_ansible):
+ ''' modifying fpolicy_scope and testing idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['export_policies_to_exclude'] = 'export1,export2'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_scope')
+ with patch.object(my_module, 'modify_fpolicy_scope', wraps=my_obj.modify_fpolicy_scope) as mock_modify:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Modify: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_modify.assert_called_with({'export_policies_to_exclude': ['export1', 'export2']})
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_scope')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Modify: ' + repr(exc.value))
+ print(exc.value.args[0]['changed'])
+ assert not exc.value.args[0]['changed']
+
+
+def test_if_all_methods_catch_exception(patch_ansible):
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_scope_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_fpolicy_scope()
+ assert 'Error creating fPolicy policy scope ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_fpolicy_scope()
+ assert 'Error deleting fPolicy policy scope ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_fpolicy_scope(modify={})
+ assert 'Error modifying fPolicy policy scope ' in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' create fpolicy scope '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' create fpolicy scope idempotent '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_scope_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_delete_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' delete fpolicy scope '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_delete(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' delete fpolicy scope '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_scope_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_modify_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' modify fpolicy scope '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_scope_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_modify_prepopulate(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' modify fpolicy scope '''
+ args = dict(default_args())
+ args['export_policies_to_exclude'] = 'export1,export2'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['one_fpolicy_scope_record'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
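The modify assertion above, mock_modify.assert_called_with({'export_policies_to_exclude': ['export1', 'export2']}), works because comma-separated list options are normalized to Python lists and only the keys whose desired value differs from the current record end up in the modify payload. A hypothetical, simplified version of that desired-vs-current comparison (not the collection's NetAppModule helper):

    # keep only the desired values that differ from the current record
    def get_modified_attributes(current, desired):
        return {key: value for key, value in desired.items()
                if key in current and current[key] != value}

    current = {'export_policies_to_exclude': ['export1'], 'vserver': 'svm1'}
    desired = {'export_policies_to_exclude': ['export1', 'export2'], 'vserver': 'svm1'}
    assert get_modified_attributes(current, desired) == {
        'export_policies_to_exclude': ['export1', 'export2']
    }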
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_status.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_status.py
new file mode 100644
index 000000000..64674a3aa
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_fpolicy_status.py
@@ -0,0 +1,286 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP fpolicy status Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_status \
+ import NetAppOntapFpolicyStatus as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection():
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'fpolicy_policy_enabled':
+ xml = self.build_fpolicy_status_info_enabled()
+ elif self.type == 'fpolicy_policy_disabled':
+ xml = self.build_fpolicy_status_info_disabled()
+ elif self.type == 'fpolicy_policy_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_fpolicy_status_info_enabled():
+ ''' build xml data for fpolicy-policy-status-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': {
+ 'fpolicy-policy-status-info': {
+ 'vserver': 'svm1',
+ 'policy-name': 'fPolicy1',
+ 'status': 'true'
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_fpolicy_status_info_disabled():
+ ''' build xml data for fpolicy-policy-status-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes-list': {
+ 'fpolicy-policy-status-info': {
+ 'vserver': 'svm1',
+ 'policy-name': 'fPolicy1',
+ 'status': 'false'
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+def default_args():
+ args = {
+ 'vserver': 'svm1',
+ 'policy_name': 'fPolicy1',
+ 'sequence_number': '10',
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ # 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'uuid': (200, {
+ 'records': [{
+ 'uuid': '56ab5d21'
+ }],
+ 'num_records': 1
+ }, None),
+ 'fpolicy_status_info_enabled': (200, {
+ 'records': [{
+ 'svm': {
+ 'uuid': '56ab5d21',
+ 'name': 'svm1'
+ },
+ 'policies': [{
+ 'name': 'fPolicy1',
+ 'enabled': True,
+ 'priority': 10
+ }]
+ }],
+ 'num_records': 1
+ }, None),
+ 'fpolicy_status_info_disabled': (200, {
+ 'records': [{
+ 'svm': {
+ 'uuid': '56ab5d21',
+ 'name': 'svm1'
+ },
+ 'policies': [{
+ 'name': 'fPolicy1',
+ 'enabled': False
+ }]
+ }],
+ 'num_records': 1
+ }, None)
+
+}
+
+
+def get_fpolicy_status_mock_object(cx_type='zapi', kind=None):
+ fpolicy_status_obj = my_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ fpolicy_status_obj.server = MockONTAPConnection()
+ else:
+ fpolicy_status_obj.server = MockONTAPConnection(kind=kind)
+ return fpolicy_status_obj
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_ensure_get_called(patch_ansible):
+ ''' test get_fpolicy_policy_status for non-existent fPolicy'''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ print('starting')
+ my_obj = my_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = MockONTAPConnection('fpolicy_policy_enabled')
+ assert my_obj.get_fpolicy_policy_status is not None
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+    ''' missing required arguments are reported as errors '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_status.NetAppOntapFpolicyStatus.enable_fpolicy_policy')
+def test_successful_enable(self, patch_ansible):
+ ''' Enable fPolicy and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_policy_disabled')
+ with patch.object(my_module, 'enable_fpolicy_policy', wraps=my_obj.enable_fpolicy_policy) as mock_enable:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Enable: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_enable.assert_called_with()
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_policy_enabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Enable: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_fpolicy_status.NetAppOntapFpolicyStatus.disable_fpolicy_policy')
+def test_successful_disable(self, patch_ansible):
+ ''' Disable fPolicy and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['state'] = 'absent'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_policy_enabled')
+ with patch.object(my_module, 'disable_fpolicy_policy', wraps=my_obj.disable_fpolicy_policy) as mock_disable:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+        print('Disable: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_disable.assert_called_with()
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['state'] = 'absent'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_policy_disabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+    print('Disable: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+def test_if_all_methods_catch_exception(patch_ansible):
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('fpolicy_policy_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.enable_fpolicy_policy()
+ print(str(exc.value.args[0]['msg']))
+ assert 'Error enabling fPolicy policy ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.disable_fpolicy_policy()
+ assert 'Error disabling fPolicy policy ' in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_enable(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' enable fPolicy policy '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['uuid'], # get
+ SRR['fpolicy_status_info_disabled'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 4
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_disable(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' disable fPolicy policy '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['uuid'], # get
+ SRR['fpolicy_status_info_enabled'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 4
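All four enable/disable scenarios above exercise the same idempotency rule: state 'present' means the policy should be enabled, 'absent' means disabled, and no change is reported when the canned status already matches. A minimal sketch of that decision (hypothetical logic, not the module source):

    # desired state vs. current status decides whether a change is needed
    def needs_change(desired_state, currently_enabled):
        want_enabled = desired_state == 'present'
        return want_enabled != currently_enabled

    assert needs_change('present', False) is True     # enable -> changed
    assert needs_change('present', True) is False     # already enabled -> idempotent
    assert needs_change('absent', True) is True       # disable -> changed
    assert needs_change('absent', False) is False     # already disabled -> idempotent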
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_igroup.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_igroup.py
new file mode 100644
index 000000000..5e5b7c64c
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_igroup.py
@@ -0,0 +1,415 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for the ONTAP igroup Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, patch_ansible, create_module, create_and_apply, assert_warning_was_raised, assert_no_warnings, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_igroup \
+ import NetAppOntapIgroup as igroup # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'vserver': 'vserver',
+ 'name': 'test',
+ 'initiator_names': 'init1',
+ 'ostype': 'linux',
+ 'initiator_group_type': 'fcp',
+ 'bind_portset': 'true',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'never'
+}
+
+igroup_with_initiator = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'vserver': 'vserver',
+ 'initiator-group-os-type': 'linux',
+ 'initiator-group-info': {
+ 'initiators': [
+ {'initiator-info': {'initiator-name': 'init1'}},
+ {'initiator-info': {'initiator-name': 'init2'}}
+ ]
+ }
+ }
+}
+
+igroup_without_initiator = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'initiator-group-info': {'vserver': 'test'}
+ }
+}
+
+ZRR = zapi_responses({
+ 'igroup_with_initiator_info': build_zapi_response(igroup_with_initiator),
+ 'igroup_without_initiator_info': build_zapi_response(igroup_without_initiator)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ igroup()
+ msg = 'missing required arguments:'
+ assert msg in exc.value.args[0]['msg']
+
+
+def test_get_nonexistent_igroup():
+ ''' Test if get_igroup returns None for non-existent igroup '''
+ register_responses([
+ ('igroup-get-iter', ZRR['empty'])
+ ])
+ igroup_obj = create_module(igroup, DEFAULT_ARGS)
+ result = igroup_obj.get_igroup('dummy')
+ assert result is None
+
+
+def test_get_existing_igroup_with_initiators():
+ ''' Test if get_igroup returns list of existing initiators '''
+ register_responses([
+ ('igroup-get-iter', ZRR['igroup_with_initiator_info'])
+ ])
+ igroup_obj = create_module(igroup, DEFAULT_ARGS)
+ result = igroup_obj.get_igroup('igroup')
+ assert DEFAULT_ARGS['initiator_names'] in result['initiator_names']
+ assert result['initiator_names'] == ['init1', 'init2']
+
+
+def test_get_existing_igroup_without_initiators():
+ ''' Test if get_igroup returns empty list() '''
+ register_responses([
+ ('igroup-get-iter', ZRR['igroup_without_initiator_info'])
+ ])
+ igroup_obj = create_module(igroup, DEFAULT_ARGS)
+ result = igroup_obj.get_igroup('igroup')
+ assert result['initiator_names'] == []
+
+
+def test_modify_initiator_calls_add_and_remove():
+ '''Test remove_initiator() is called followed by add_initiator() on modify operation'''
+ register_responses([
+ ('igroup-get-iter', ZRR['igroup_with_initiator_info']),
+ ('igroup-remove', ZRR['success']),
+ ('igroup-remove', ZRR['success']),
+ ('igroup-add', ZRR['success'])
+ ])
+    assert create_and_apply(igroup, DEFAULT_ARGS, {'initiator_names': 'replacewithme'})['changed']
+
+
+def test_modify_called_from_add():
+ '''Test remove_initiator() and add_initiator() calls modify'''
+ register_responses([
+ ('igroup-get-iter', ZRR['igroup_without_initiator_info']),
+ ('igroup-add', ZRR['success'])
+ ])
+    assert create_and_apply(igroup, DEFAULT_ARGS, {'initiator_names': 'replacewithme'})['changed']
+
+
+def test_modify_called_from_remove():
+ '''Test remove_initiator() and add_initiator() calls modify'''
+ register_responses([
+ ('igroup-get-iter', ZRR['igroup_with_initiator_info']),
+ ('igroup-remove', ZRR['success']),
+ ('igroup-remove', ZRR['success'])
+ ])
+    assert create_and_apply(igroup, DEFAULT_ARGS, {'initiator_names': ''})['changed']
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('igroup-get-iter', ZRR['empty']),
+ ('igroup-create', ZRR['success']),
+ ('igroup-add', ZRR['success'])
+ ])
+    assert create_and_apply(igroup, DEFAULT_ARGS)['changed']
+
+
+def test_successful_delete():
+ ''' Test successful delete '''
+ register_responses([
+ ('igroup-get-iter', ZRR['igroup_with_initiator_info']),
+ ('igroup-destroy', ZRR['success'])
+ ])
+    assert create_and_apply(igroup, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_successful_rename():
+ '''Test successful rename'''
+ register_responses([
+ ('igroup-get-iter', ZRR['empty']),
+ ('igroup-get-iter', ZRR['igroup_with_initiator_info']),
+ ('igroup-rename', ZRR['success']),
+ ('igroup-remove', ZRR['success']),
+ ])
+ args = {
+ 'from_name': 'test',
+ 'name': 'test_new'
+ }
+ assert create_and_apply(igroup, DEFAULT_ARGS, args)['changed']
+
+
+def test_negative_modify_anything_zapi():
+    ''' Test that modifying an option such as vserver is rejected in ZAPI '''
+ register_responses([
+ ('igroup-get-iter', ZRR['igroup_with_initiator_info']),
+ ])
+ args = {
+ 'vserver': 'my_vserver',
+ 'use_rest': 'never'
+ }
+ msg = "Error: modifying {'vserver': 'my_vserver'} is not supported in ZAPI"
+ assert msg in create_and_apply(igroup, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_negative_mutually_exclusive():
+    ''' Test igroups and initiator_names are mutually exclusive '''
+ args = {
+ 'use_rest': 'auto',
+ 'igroups': 'my_group'
+ }
+ msg = "parameters are mutually exclusive: igroups|initiator_names"
+ assert msg in create_module(igroup, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_negative_igroups_require_rest():
+    ''' Test igroups option requires ONTAP 9.9.1 or later with REST enabled '''
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['initiator_names']
+ args = {
+ 'igroups': 'my_group'
+ }
+ msg = "requires ONTAP 9.9.1 or later and REST must be enabled"
+ assert msg in create_module(igroup, DEFAULT_ARGS_COPY, args, fail=True)['msg']
+
+
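+# canned REST responses for the igroup tests: each entry is a (status_code, body, error) tuple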
+SRR = rest_responses({
+ 'one_igroup_record': (200, dict(records=[
+ dict(uuid='a1b2c3',
+ name='test',
+ svm=dict(name='vserver'),
+ initiators=[{'name': 'todelete'}],
+ protocol='fcp',
+ os_type='aix')
+ ], num_records=1), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None)
+})
+
+
+def test_successful_create_rest():
+ ''' Test successful create '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['empty_records']),
+ ('POST', 'protocols/san/igroups', SRR['success'])
+ ])
+ assert create_and_apply(igroup, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+
+
+def test_incomplete_record_rest():
+    ''' Test error when the igroup record is missing expected fields '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['one_record_uuid'])
+ ])
+ msg = "Error: unexpected igroup body:"
+ assert msg in create_and_apply(igroup, DEFAULT_ARGS, {'use_rest': 'always'}, fail=True)['msg']
+
+
+def test_successful_delete_rest():
+ ''' Test successful delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['one_igroup_record']),
+ ('DELETE', 'protocols/san/igroups/a1b2c3', SRR['success'])
+ ])
+ args = {'state': 'absent', 'use_rest': 'always'}
+ assert create_and_apply(igroup, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_modify_rest():
+ ''' Test successful modify '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['one_igroup_record']),
+ ('DELETE', 'protocols/san/igroups/a1b2c3/initiators/todelete', SRR['success']),
+ ('POST', 'protocols/san/igroups/a1b2c3/initiators', SRR['success']),
+ ('PATCH', 'protocols/san/igroups/a1b2c3', SRR['success'])
+ ])
+ assert create_and_apply(igroup, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+
+
+def test_successful_modify_initiator_objects_rest():
+    ''' Test successful modify with initiator_objects '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/igroups', SRR['one_igroup_record']),
+ ('DELETE', 'protocols/san/igroups/a1b2c3/initiators/todelete', SRR['success']),
+ ('POST', 'protocols/san/igroups/a1b2c3/initiators', SRR['success']),
+ ('PATCH', 'protocols/san/igroups/a1b2c3', SRR['success'])
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['initiator_names']
+ DEFAULT_ARGS_COPY['initiator_objects'] = [{'name': 'init1', 'comment': 'comment1'}]
+ assert create_and_apply(igroup, DEFAULT_ARGS_COPY, {'use_rest': 'always'})['changed']
+
+
+def test_successful_modify_initiator_objects_comment_rest():
+    ''' Test successful modify of an initiator comment '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/igroups', SRR['one_igroup_record']),
+ ('PATCH', 'protocols/san/igroups/a1b2c3/initiators/todelete', SRR['success']),
+ ('PATCH', 'protocols/san/igroups/a1b2c3', SRR['success'])
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['initiator_names']
+ DEFAULT_ARGS_COPY['initiator_objects'] = [{'name': 'todelete', 'comment': 'comment1'}]
+ assert create_and_apply(igroup, DEFAULT_ARGS_COPY, {'use_rest': 'always'})['changed']
+
+
+def test_successful_modify_igroups_rest():
+    ''' Test successful modify with nested igroups '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/igroups', SRR['one_igroup_record']),
+ ('DELETE', 'protocols/san/igroups/a1b2c3/initiators/todelete', SRR['success']),
+ ('POST', 'protocols/san/igroups/a1b2c3/igroups', SRR['success']),
+ ('PATCH', 'protocols/san/igroups/a1b2c3', SRR['success'])
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['initiator_names']
+ args = {
+ 'igroups': ['test_igroup'],
+ 'use_rest': 'auto',
+ 'force_remove_initiator': True
+ }
+ assert create_and_apply(igroup, DEFAULT_ARGS_COPY, args)['changed']
+
+
+def test_9_9_0_no_igroups_rest():
+    ''' Test igroups option is rejected on ONTAP 9.9.0 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['initiator_names']
+ args = {
+ 'igroups': ['test_igroup'],
+ 'use_rest': 'always'
+ }
+ msg = 'Error: using igroups requires ONTAP 9.9.1 or later and REST must be enabled - ONTAP version: 9.9.0.'
+ assert msg in create_module(igroup, DEFAULT_ARGS_COPY, args, fail=True)['msg']
+
+
+def test_successful_rename_rest():
+ '''Test successful rename'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/igroups', SRR['empty_records']),
+ ('GET', 'protocols/san/igroups', SRR['one_igroup_record']),
+ ('DELETE', 'protocols/san/igroups/a1b2c3/initiators/todelete', SRR['success']),
+ ('POST', 'protocols/san/igroups/a1b2c3/initiators', SRR['success']),
+ ('PATCH', 'protocols/san/igroups/a1b2c3', SRR['success'])
+ ])
+ args = {
+ 'use_rest': 'always',
+ 'from_name': 'test',
+ 'name': 'test_new'
+ }
+ assert create_and_apply(igroup, DEFAULT_ARGS, args)['changed']
+
+
+def test_negative_zapi_or_rest99_option():
+    ''' Test bind_portset on ONTAP 9.8 warns about falling back to ZAPI '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0'])
+ ])
+ args = {
+ 'use_rest': 'always',
+ 'bind_portset': 'my_portset'
+ }
+ create_module(igroup, DEFAULT_ARGS, args)
+ msg = "Warning: falling back to ZAPI: using bind_portset requires ONTAP 9.9 or later and REST must be enabled - ONTAP version: 9.8.0."
+ print_warnings()
+ assert_warning_was_raised(msg)
+
+
+def test_positive_zapi_or_rest99_option():
+    ''' Test bind_portset with use_rest auto on ONTAP 9.8 falls back to ZAPI with a warning '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0'])
+ ])
+ args = {
+ 'use_rest': 'auto',
+ 'bind_portset': 'my_portset'
+ }
+ create_module(igroup, DEFAULT_ARGS, args)
+ msg = "Warning: falling back to ZAPI: using bind_portset requires ONTAP 9.9 or later and REST must be enabled - ONTAP version: 9.8.0."
+ print_warnings()
+ assert_warning_was_raised(msg)
+
+
+def test_create_rest_99():
+ ''' Test 9.9 option works with REST '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['empty_records']),
+ ('POST', 'protocols/san/igroups', SRR['success'])
+ ])
+ args = {
+ 'use_rest': 'auto',
+ 'bind_portset': 'my_portset'
+ }
+ assert create_and_apply(igroup, DEFAULT_ARGS, args)['changed']
+    print_warnings()
+ assert_no_warnings()
+
+
+def test_negative_modify_vserver_rest():
+    ''' Test modifying vserver is rejected in REST '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['one_igroup_record'])
+ ])
+ args = {
+ 'vserver': 'my_vserver',
+ 'use_rest': 'always'
+ }
+ msg = "Error: modifying {'vserver': 'my_vserver'} is not supported in REST"
+ assert msg in create_and_apply(igroup, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_negative_igroups_require_9_9():
+    ''' Test igroups option requires ONTAP 9.9.1 or later even with REST enabled '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0'])
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['initiator_names']
+ args = {
+ 'igroups': 'test_igroup',
+ 'use_rest': 'always'
+ }
+ msg = "requires ONTAP 9.9.1 or later and REST must be enabled"
+ assert msg in create_module(igroup, DEFAULT_ARGS_COPY, args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_igroup_initiator.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_igroup_initiator.py
new file mode 100644
index 000000000..7da908dcb
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_igroup_initiator.py
@@ -0,0 +1,256 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module na_ontap_igroup_initiator '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_igroup_initiator \
+ import NetAppOntapIgroupInitiator as initiator # module under test
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'state': 'present',
+ 'vserver': 'vserver',
+ 'name': 'init1',
+ 'initiator_group': 'test',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+}
+
+
+initiator_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'initiator-group-info': {
+ 'initiators': [
+ {'initiator-info': {'initiator-name': 'init1'}},
+ {'initiator-info': {'initiator-name': 'init2'}}
+ ]
+ }
+ }
+}
+
+
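+# canned ZAPI responses built from the dict above; zapi_responses() also provides shared entries such as 'empty', 'success' and 'error'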
+ZRR = zapi_responses({
+ 'initiator_info': build_zapi_response(initiator_info)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ initiator()
+    msg = 'missing required arguments:'
+    assert msg in exc.value.args[0]['msg']
+
+
+def test_get_nonexistent_igroup():
+    ''' Test get_initiators returns an empty list when the igroup does not exist '''
+ register_responses([
+ ('igroup-get-iter', ZRR['empty'])
+ ])
+ initiator_obj = create_module(initiator, DEFAULT_ARGS)
+ result = initiator_obj.get_initiators()
+ assert result == []
+
+
+def test_get_existing_initiator():
+    ''' Test get_initiators returns the list of existing initiators '''
+ register_responses([
+ ('igroup-get-iter', ZRR['initiator_info'])
+ ])
+ initiator_obj = create_module(initiator, DEFAULT_ARGS)
+ result = initiator_obj.get_initiators()
+ assert DEFAULT_ARGS['name'] in result
+ assert result == ['init1', 'init2'] # from build_igroup_initiators()
+
+
+def test_successful_add():
+ ''' Test successful add'''
+ register_responses([
+ ('igroup-get-iter', ZRR['initiator_info']),
+ ('igroup-add', ZRR['success'])
+ ])
+ args = {'name': 'init3'}
+ assert create_and_apply(initiator, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_add_idempotency():
+ ''' Test successful add idempotency '''
+ register_responses([
+ ('igroup-get-iter', ZRR['initiator_info'])
+ ])
+ assert create_and_apply(initiator, DEFAULT_ARGS)['changed'] is False
+
+
+def test_successful_remove():
+ ''' Test successful remove '''
+ register_responses([
+ ('igroup-get-iter', ZRR['initiator_info']),
+ ('igroup-remove', ZRR['success'])
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(initiator, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_remove_idempotency():
+ ''' Test successful remove idempotency'''
+ register_responses([
+ ('igroup-get-iter', ZRR['initiator_info'])
+ ])
+ args = {'state': 'absent', 'name': 'alreadyremoved'}
+    assert create_and_apply(initiator, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('igroup-get-iter', ZRR['error']),
+ ('igroup-add', ZRR['error']),
+ ('igroup-remove', ZRR['error'])
+ ])
+ initiator_obj = create_module(initiator, DEFAULT_ARGS)
+
+ error = expect_and_capture_ansible_exception(initiator_obj.get_initiators, 'fail')['msg']
+ assert 'Error fetching igroup info' in error
+
+ error = expect_and_capture_ansible_exception(initiator_obj.modify_initiator, 'fail', 'init4', 'igroup-add')['msg']
+ assert 'Error modifying igroup initiator' in error
+
+ error = expect_and_capture_ansible_exception(initiator_obj.modify_initiator, 'fail', 'init4', 'igroup-remove')['msg']
+ assert 'Error modifying igroup initiator' in error
+
+
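+# canned REST responses; shared entries such as 'is_rest_9_9_0', 'success', 'empty_records' and 'generic_error' come from rest_responses()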
+SRR = rest_responses({
+ 'initiator_info': (200, {"records": [
+ {
+ "svm": {"name": "svm1"},
+ "uuid": "897de45f-bbbf-11ec-9f18-005056b3b297",
+ "name": "init1",
+ "initiators": [
+ {"name": "iqn.2001-04.com.example:abc123"},
+ {"name": "iqn.2001-04.com.example:abc124"},
+ {'name': 'init3'}
+ ]
+ }
+ ], "num_records": 1}, None),
+    'igroup_without_initiators': (200, {"records": [
+ {
+ "svm": {"name": "svm1"},
+ "uuid": "897de45f-bbbf-11ec-9f18-005056alr297",
+ "name": "init22",
+ }
+ ], "num_records": 1}, None)
+})
+
+
+def test_successful_add_rest():
+ ''' Test successful add'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['initiator_info']),
+ ('POST', 'protocols/san/igroups/897de45f-bbbf-11ec-9f18-005056b3b297/initiators', SRR['success'])
+ ])
+ assert create_and_apply(initiator, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+
+
+def test_successful_add_idempotency_rest():
+ ''' Test successful add idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['initiator_info'])
+ ])
+ args = {'use_rest': 'always', 'name': 'iqn.2001-04.com.example:abc123'}
+ assert create_and_apply(initiator, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_successful_add_to_0_initiator_igroup_rest():
+ ''' Test successful add'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+        ('GET', 'protocols/san/igroups', SRR['igroup_without_initiators']),
+ ('POST', 'protocols/san/igroups/897de45f-bbbf-11ec-9f18-005056alr297/initiators', SRR['success'])
+ ])
+ assert create_and_apply(initiator, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+
+
+def test_successful_remove_rest():
+ ''' Test successful remove '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['initiator_info']),
+ ('DELETE', 'protocols/san/igroups/897de45f-bbbf-11ec-9f18-005056b3b297/initiators/init3', SRR['success'])
+ ])
+ args = {'use_rest': 'always', 'name': 'init3', 'state': 'absent'}
+ assert create_and_apply(initiator, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_remove_idempotency_rest():
+ ''' Test successful remove idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['initiator_info'])
+ ])
+ args = {'use_rest': 'always', 'name': 'alreadyremoved', 'state': 'absent'}
+ assert create_and_apply(initiator, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_get_initiator_catch_exception_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['generic_error'])
+ ])
+ error = create_and_apply(initiator, DEFAULT_ARGS, {'use_rest': 'always'}, 'fail')['msg']
+ assert 'Error fetching igroup info' in error
+
+
+def test_add_initiator_catch_exception_rest():
+ ''' Test add error'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['initiator_info']),
+ ('POST', 'protocols/san/igroups/897de45f-bbbf-11ec-9f18-005056b3b297/initiators', SRR['generic_error'])
+ ])
+ error = create_and_apply(initiator, DEFAULT_ARGS, {'use_rest': 'always'}, 'fail')['msg']
+ assert 'Error modifying igroup initiator' in error
+
+
+def test_remove_initiator_catch_exception_rest():
+ ''' Test remove error'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['initiator_info']),
+ ('DELETE', 'protocols/san/igroups/897de45f-bbbf-11ec-9f18-005056b3b297/initiators/init3', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', 'name': 'init3', 'state': 'absent'}
+ error = create_and_apply(initiator, DEFAULT_ARGS, args, 'fail')['msg']
+ assert 'Error modifying igroup initiator' in error
+
+
+def test_error_uuid_not_found():
+ ''' Test uuid error'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/igroups', SRR['empty_records'])
+ ])
+ args = {'use_rest': 'always'}
+ error = create_and_apply(initiator, DEFAULT_ARGS, args, 'fail')['msg']
+ assert 'Error modifying igroup initiator init1: igroup not found' in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_info.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_info.py
new file mode 100644
index 000000000..18c35c910
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_info.py
@@ -0,0 +1,738 @@
+# (c) 2018-2019, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module na_ontap_info '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_warning_was_raised, expect_and_capture_ansible_exception, call_main, create_module, patch_ansible, print_warnings
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_info import NetAppONTAPGatherInfo as my_module, main as my_main
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_info import convert_keys as info_convert_keys, __finditem as info_finditem
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'use_rest',
+}
+
+
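+# helpers building the ZAPI attribute dictionaries returned by the mocked calls below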
+def net_port_info(port_type):
+ return {
+ 'attributes-list': [{
+ 'net-port-info': {
+ 'node': 'node_0',
+ 'port': 'port_0',
+ 'broadcast_domain': 'broadcast_domain_0',
+ 'ipspace': 'ipspace_0',
+ 'port_type': port_type
+ }}, {
+ 'net-port-info': {
+ 'node': 'node_1',
+ 'port': 'port_1',
+ 'broadcast_domain': 'broadcast_domain_1',
+ 'ipspace': 'ipspace_1',
+ 'port_type': port_type
+ }}
+ ]
+ }
+
+
+def net_ifgrp_info(id):
+ return {
+ 'attributes': {
+ 'net-ifgrp-info': {
+ 'ifgrp-name': 'ifgrp_%d' % id,
+ 'node': 'node_%d' % id,
+ }
+ }
+ }
+
+
+def aggr_efficiency_info(node):
+ attributes = {
+ 'aggregate': 'v2',
+ }
+ if node:
+ attributes['node'] = node
+ return {
+ 'attributes-list': [{
+ 'aggr-efficiency-info': attributes
+ }]
+ }
+
+
+def lun_info(path, next_tag=False):
+ info = {
+ 'attributes-list': [{
+ 'lun-info': {
+ 'serial-number': 'z6CcD+SK5mPb',
+ 'vserver': 'svm1',
+ 'path': path}
+ }]
+ }
+    if next_tag:
+ info['next-tag'] = 'next_tag'
+ return info
+
+
+list_of_one = [{'k1': 'v1'}]
+list_of_two = [{'k1': 'v1'}, {'k2': 'v2'}]
+list_of_two_dups = [{'k1': 'v1'}, {'k1': 'v2'}]
+
+
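+# canned ZAPI responses built from the helper dictionaries above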
+ZRR = zapi_responses({
+ 'net_port_info': build_zapi_response(net_port_info('whatever'), 2),
+ 'net_port_info_with_ifgroup': build_zapi_response(net_port_info('if_group'), 2),
+ 'net_ifgrp_info_0': build_zapi_response(net_ifgrp_info(0), 1),
+ 'net_ifgrp_info_1': build_zapi_response(net_ifgrp_info(1), 1),
+ 'list_of_one': build_zapi_response(list_of_one),
+ 'list_of_two': build_zapi_response(list_of_two),
+ 'list_of_two_dups': build_zapi_response(list_of_two_dups),
+ 'aggr_efficiency_info': build_zapi_response(aggr_efficiency_info('v1')),
+ 'aggr_efficiency_info_no_node': build_zapi_response(aggr_efficiency_info(None)),
+ 'lun_info': build_zapi_response(lun_info('p1')),
+ 'lun_info_next_2': build_zapi_response(lun_info('p2', True)),
+ 'lun_info_next_3': build_zapi_response(lun_info('p3', True)),
+ 'lun_info_next_4': build_zapi_response(lun_info('p4', True)),
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ assert 'missing required arguments: hostname' in call_main(my_main, {}, fail=True)['msg']
+
+
+def test_ensure_command_called():
+ ''' calling get_all will raise a KeyError exception '''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['version']),
+ ('ZAPI', 'net-interface-get-iter', ZRR['success']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ results = my_obj.get_all(['net_interface_info'])
+ assert 'net_interface_info' in results
+
+
+def test_get_generic_get_iter():
+ '''calling get_generic_get_iter will return expected dict'''
+ register_responses([
+ ('ZAPI', 'net-port-get-iter', ZRR['net_port_info']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ result = obj.get_generic_get_iter(
+ 'net-port-get-iter',
+ attribute='net-port-info',
+ key_fields=('node', 'port'),
+ query={'max-records': '1024'}
+ )
+ assert result.get('node_0:port_0')
+ assert result.get('node_1:port_1')
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_info.NetAppONTAPGatherInfo.get_all')
+def test_main(get_all):
+ '''test main method - default: no state.'''
+ register_responses([
+ ])
+ get_all.side_effect = [
+ {'test_get_all': {'vserver_login_banner_info': 'test_vserver_login_banner_info', 'vserver_info': 'test_vserver_info'}}
+ ]
+ results = call_main(my_main, DEFAULT_ARGS)
+ assert 'ontap_info' in results
+ assert 'test_get_all' in results['ontap_info']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_info.NetAppONTAPGatherInfo.get_all')
+def test_main_with_state(get_all):
+ '''test main method with explicit state.'''
+ register_responses([
+ ])
+ module_args = {'state': 'some_state'}
+ get_all.side_effect = [
+ {'test_get_all': {'vserver_login_banner_info': 'test_vserver_login_banner_info', 'vserver_info': 'test_vserver_info'}}
+ ]
+ results = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert 'ontap_info' in results
+ assert 'test_get_all' in results['ontap_info']
+ print_warnings()
+ assert_warning_was_raised("option 'state' is deprecated.")
+
+
+def test_get_ifgrp_info_no_ifgrp():
+ '''test get_ifgrp_info with empty ifgrp_info'''
+ register_responses([
+ ('ZAPI', 'net-port-get-iter', ZRR['net_port_info']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ result = obj.get_ifgrp_info()
+ assert result == {}
+
+
+def test_get_ifgrp_info_with_ifgrp():
+ '''test get_ifgrp_info with empty ifgrp_info'''
+ register_responses([
+ ('ZAPI', 'net-port-get-iter', ZRR['net_port_info_with_ifgroup']),
+ ('ZAPI', 'net-port-ifgrp-get', ZRR['net_ifgrp_info_0']),
+ ('ZAPI', 'net-port-ifgrp-get', ZRR['net_ifgrp_info_1']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ results = obj.get_ifgrp_info()
+ assert results.get('node_0:ifgrp_0')
+ assert results.get('node_1:ifgrp_1')
+
+
+def test_ontapi_error():
+ '''test ontapi will raise zapi error'''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['error']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ error = zapi_error_message('Error calling API system-get-ontapi-version')
+ assert error in expect_and_capture_ansible_exception(obj.ontapi, 'fail')['msg']
+
+
+def test_call_api_error():
+ '''test call_api will raise zapi error'''
+ register_responses([
+ ('ZAPI', 'security-key-manager-key-get-iter', ZRR['error']),
+ ('ZAPI', 'lun-get-iter', ZRR['error_missing_api']),
+ ('ZAPI', 'nvme-get-iter', ZRR['error']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ # 1 error is ignored
+ assert obj.call_api('security-key-manager-key-get-iter') == (None, None)
+ # 2 missing API (cluster admin API not visible at vserver level)
+ error = zapi_error_message('Error invalid API. Most likely running a cluster level API as vserver', 13005)
+ assert error in expect_and_capture_ansible_exception(obj.call_api, 'fail', 'lun-get-iter')['msg']
+ # 3 API error
+ error = zapi_error_message('Error calling API nvme-get-iter')
+ assert error in expect_and_capture_ansible_exception(obj.call_api, 'fail', 'nvme-get-iter')['msg']
+
+
+def test_get_generic_get_iter_key_error():
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ keys = 'single_key'
+ error = "Error: key 'single_key' not found for lun-get-iter, got:"
+ assert error in expect_and_capture_ansible_exception(obj.get_generic_get_iter, 'fail', 'lun-get-iter', None, keys)['msg']
+ keys = ('key1', 'path')
+ error = "Error: key 'key1' not found for lun-get-iter, got:"
+ assert error in expect_and_capture_ansible_exception(obj.get_generic_get_iter, 'fail', 'lun-get-iter', None, keys)['msg']
+ # ignoring key_errors
+ module_args = {'continue_on_error': 'key_error'}
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ keys = 'single_key'
+ missing_key = 'Error_1_key_not_found_%s' % keys
+ results = obj.get_generic_get_iter('lun-get-iter', None, keys)
+ assert missing_key in results
+ keys = ('key1', 'path')
+ missing_key = 'Error_1_key_not_found_%s' % keys[0]
+ results = obj.get_generic_get_iter('lun-get-iter', None, keys)
+ assert missing_key in results
+
+
+def test_find_item():
+ '''test __find_item return expected key value'''
+ register_responses([
+ ])
+ obj = {"A": 1, "B": {"C": {"D": 2}}}
+ key = "D"
+ result = info_finditem(obj, key)
+ assert result == 2
+ obj = {"A": 1, "B": {"C": {"D": None}}}
+ result = info_finditem(obj, key)
+ assert result == "None"
+
+
+def test_subset_return_all_complete():
+ ''' Check all returns all of the entries if version is high enough '''
+ register_responses([
+ ])
+ version = '170' # change this if new ZAPIs are supported
+ obj = create_module(my_module, DEFAULT_ARGS)
+ subset = obj.get_subset(['all'], version)
+ assert set(obj.info_subsets.keys()) == subset
+
+
+def test_subset_return_all_partial():
+ ''' Check all returns a subset of the entries if version is low enough '''
+ register_responses([
+ ])
+ version = '120' # low enough so that some ZAPIs are not supported
+ obj = create_module(my_module, DEFAULT_ARGS)
+ subset = obj.get_subset(['all'], version)
+ all_keys = obj.info_subsets.keys()
+ assert set(all_keys) > subset
+ supported_keys = filter(lambda key: obj.info_subsets[key]['min_version'] <= version, all_keys)
+ assert set(supported_keys) == subset
+
+
+def test_subset_return_one():
+ ''' Check single entry returns one '''
+ register_responses([
+ ])
+ version = '120' # low enough so that some ZAPIs are not supported
+ obj = create_module(my_module, DEFAULT_ARGS)
+ subset = obj.get_subset(['net_interface_info'], version)
+ assert len(subset) == 1
+
+
+def test_subset_return_multiple():
+ ''' Check that more than one entry returns the same number '''
+ register_responses([
+ ])
+ version = '120' # low enough so that some ZAPIs are not supported
+ obj = create_module(my_module, DEFAULT_ARGS)
+ subset_entries = ['net_interface_info', 'net_port_info']
+ subset = obj.get_subset(subset_entries, version)
+ assert len(subset) == len(subset_entries)
+
+
+def test_subset_return_bad():
+ ''' Check that a bad subset entry will error out '''
+ register_responses([
+ ])
+ version = '120' # low enough so that some ZAPIs are not supported
+ obj = create_module(my_module, DEFAULT_ARGS)
+ error = 'Bad subset: my_invalid_subset'
+ assert error in expect_and_capture_ansible_exception(obj.get_subset, 'fail', ['net_interface_info', 'my_invalid_subset'], version)['msg']
+
+
+def test_subset_return_unsupported():
+ ''' Check that a new subset entry will error out on an older system '''
+ register_responses([
+ ])
+ version = '120' # low enough so that some ZAPIs are not supported
+ key = 'nvme_info' # only supported starting at 140
+ obj = create_module(my_module, DEFAULT_ARGS)
+ error = 'Remote system at version %s does not support %s' % (version, key)
+ assert error in expect_and_capture_ansible_exception(obj.get_subset, 'fail', ['net_interface_info', key], version)['msg']
+
+
+def test_subset_return_none():
+ ''' Check usable subset can be empty '''
+ register_responses([
+ ])
+    version = '!'  # lower than '0', so that no ZAPI is supported
+ obj = create_module(my_module, DEFAULT_ARGS)
+ subset = obj.get_subset(['all'], version)
+ assert len(subset) == 0
+
+
+def test_subset_return_all_expect_one():
+ ''' Check !x returns all of the entries except x if version is high enough '''
+ register_responses([
+ ])
+ version = '170' # change this if new ZAPIs are supported
+ obj = create_module(my_module, DEFAULT_ARGS)
+ subset = obj.get_subset(['!net_interface_info'], version)
+ assert len(obj.info_subsets.keys()) == len(subset) + 1
+ subset.add('net_interface_info')
+ assert set(obj.info_subsets.keys()) == subset
+
+
+def test_subset_return_all_expect_three():
+ ''' Check !x,!y,!z returns all of the entries except x, y, z if version is high enough '''
+ register_responses([
+ ])
+ version = '170' # change this if new ZAPIs are supported
+ obj = create_module(my_module, DEFAULT_ARGS)
+ subset = obj.get_subset(['!net_interface_info', '!nvme_info', '!ontap_version'], version)
+ assert len(obj.info_subsets.keys()) == len(subset) + 3
+ subset.update(['net_interface_info', 'nvme_info', 'ontap_version'])
+ assert set(obj.info_subsets.keys()) == subset
+
+
+def test_subset_return_none_with_exclusion():
+ ''' Check usable subset can be empty with !x '''
+ register_responses([
+ ])
+    version = '!'  # lower than '0', so that no ZAPI is supported
+ key = 'net_interface_info'
+ obj = create_module(my_module, DEFAULT_ARGS)
+ error = 'Remote system at version %s does not support %s' % (version, key)
+ assert error in expect_and_capture_ansible_exception(obj.get_subset, 'fail', ['!' + key], version)['msg']
+
+
+def test_get_generic_get_iter_flatten_list_of_one():
+ '''calling get_generic_get_iter will return expected dict'''
+ register_responses([
+ ('ZAPI', 'list_of_one', ZRR['list_of_one']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ result = obj.get_generic_get_iter(
+ 'list_of_one',
+ attributes_list_tag=None,
+ )
+ print(ZRR['list_of_one'][0].to_string())
+ print(result)
+ assert isinstance(result, dict)
+ assert result.get('k1') == 'v1'
+
+
+def test_get_generic_get_iter_flatten_list_of_two():
+ '''calling get_generic_get_iter will return expected dict'''
+ register_responses([
+ ('ZAPI', 'list_of_two', ZRR['list_of_two']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ result = obj.get_generic_get_iter(
+ 'list_of_two',
+ attributes_list_tag=None,
+ )
+ print(result)
+ assert isinstance(result, dict)
+ assert result.get('k1') == 'v1'
+ assert result.get('k2') == 'v2'
+
+
+def test_get_generic_get_iter_flatten_list_of_two_dups():
+ '''calling get_generic_get_iter will return expected dict'''
+ register_responses([
+ ('ZAPI', 'list_of_two_dups', ZRR['list_of_two_dups']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ result = obj.get_generic_get_iter(
+ 'list_of_two_dups',
+ attributes_list_tag=None,
+ )
+ assert isinstance(result, list)
+ assert result[0].get('k1') == 'v1'
+ assert result[1].get('k1') == 'v2'
+
+
+def test_check_underscore():
+ ''' Check warning is recorded if '_' is found in key '''
+ register_responses([
+ ])
+ test_dict = dict(
+ bad_key='something'
+ )
+ test_dict['good-key'] = [dict(
+ other_bad_key=dict(
+ yet_another_bad_key=1
+ ),
+ somekey=dict(
+ more_bad_key=2
+ )
+ )]
+ obj = create_module(my_module, DEFAULT_ARGS)
+ obj.check_for___in_keys(test_dict)
+ print('Info: %s' % repr(obj.warnings))
+ for key in ['bad_key', 'other_bad_key', 'yet_another_bad_key', 'more_bad_key']:
+ msg = "Underscore in ZAPI tag: %s, do you mean '-'?" % key
+ assert msg in obj.warnings
+ obj.warnings.remove(msg)
+    # make sure there are no extra warnings (i.e. we found and removed all of them)
+ assert obj.warnings == list()
+
+
+def d2us(astr):
+ return str.replace(astr, '-', '_')
+
+
+def test_convert_keys_string():
+ ''' no conversion '''
+ register_responses([
+ ])
+ key = 'a-b-c'
+ assert info_convert_keys(key) == key
+
+
+def test_convert_keys_tuple():
+ ''' no conversion '''
+ register_responses([
+ ])
+ key = 'a-b-c'
+ anobject = (key, key)
+ assert info_convert_keys(anobject) == anobject
+
+
+def test_convert_keys_list():
+ ''' no conversion '''
+ register_responses([
+ ])
+ key = 'a-b-c'
+ anobject = [key, key]
+ assert info_convert_keys(anobject) == anobject
+
+
+def test_convert_keys_simple_dict():
+ ''' conversion of keys '''
+ register_responses([
+ ])
+ key = 'a-b-c'
+ anobject = {key: 1}
+ assert list(info_convert_keys(anobject).keys())[0] == d2us(key)
+
+
+def test_convert_keys_list_of_dict():
+ ''' conversion of keys '''
+ register_responses([
+ ])
+ key = 'a-b-c'
+ anobject = [{key: 1}, {key: 2}]
+ converted = info_convert_keys(anobject)
+ for adict in converted:
+ for akey in adict:
+ assert akey == d2us(key)
+
+
+def test_set_error_flags_error_n():
+    ''' Check 'never' must be the only keyword in continue_on_error '''
+ register_responses([
+ ])
+ module_args = {'continue_on_error': ['never', 'whatever']}
+ msg = "never needs to be the only keyword in 'continue_on_error' option."
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_set_error_flags_error_a():
+    ''' Check 'always' must be the only keyword in continue_on_error '''
+ register_responses([
+ ])
+ module_args = {'continue_on_error': ['whatever', 'always']}
+ print('Info: %s' % call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'])
+ msg = "always needs to be the only keyword in 'continue_on_error' option."
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_set_error_flags_error_u():
+    ''' Check unknown keywords in continue_on_error are rejected '''
+ register_responses([
+ ])
+ module_args = {'continue_on_error': ['whatever', 'else']}
+
+ print('Info: %s' % call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'])
+ msg = "whatever is not a valid keyword in 'continue_on_error' option."
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_set_error_flags_1_flag():
+    ''' Check set_error_flags builds the expected dict when one error is ignored '''
+ register_responses([
+ ])
+ module_args = {'continue_on_error': ['missing_vserver_api_error']}
+ obj = create_module(my_module, DEFAULT_ARGS, module_args, 'vserver')
+ assert not obj.error_flags['missing_vserver_api_error']
+ assert obj.error_flags['rpc_error']
+ assert obj.error_flags['other_error']
+
+
+def test_set_error_flags_2_flags():
+    ''' Check set_error_flags builds the expected dict when two errors are ignored '''
+ register_responses([
+ ])
+ module_args = {'continue_on_error': ['missing_vserver_api_error', 'rpc_error']}
+ obj = create_module(my_module, DEFAULT_ARGS, module_args, 'vserver')
+ assert not obj.error_flags['missing_vserver_api_error']
+ assert not obj.error_flags['rpc_error']
+ assert obj.error_flags['other_error']
+
+
+def test_set_error_flags_3_flags():
+    ''' Check set_error_flags builds the expected dict when three errors are ignored '''
+ register_responses([
+ ])
+ module_args = {'continue_on_error': ['missing_vserver_api_error', 'rpc_error', 'other_error']}
+ obj = create_module(my_module, DEFAULT_ARGS, module_args, 'vserver')
+ assert not obj.error_flags['missing_vserver_api_error']
+ assert not obj.error_flags['rpc_error']
+ assert not obj.error_flags['other_error']
+
+
+def test_get_subset_missing_key():
+ '''calling aggr_efficiency_info with missing key'''
+ register_responses([
+ ('ZAPI', 'aggr-efficiency-get-iter', ZRR['aggr_efficiency_info']),
+ ('ZAPI', 'aggr-efficiency-get-iter', ZRR['aggr_efficiency_info_no_node']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ call = obj.info_subsets['aggr_efficiency_info']
+ info = call['method'](**call['kwargs'])
+ print(info)
+ assert 'v1:v2' in info
+ call = obj.info_subsets['aggr_efficiency_info']
+ info = call['method'](**call['kwargs'])
+ print(info)
+ assert 'key_not_present:v2' in info
+
+
+def test_get_lun_with_serial():
+ '''calling lun_info with serial-number key'''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ # no records
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'lun-get-iter', ZRR['no_records']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ info = obj.get_all(['lun_info'])
+ print(info)
+ assert 'lun_info' in info
+ lun_info = info['lun_info']['svm1:p1']
+ assert lun_info['serial_number'] == 'z6CcD+SK5mPb'
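+    # naa_id is the NetApp vendor prefix 'naa.600a0980' followed by the serial number encoded in hex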
+ assert lun_info['serial_hex'] == '7a364363442b534b356d5062'
+ assert lun_info['naa_id'] == 'naa.600a0980' + '7a364363442b534b356d5062'
+ # no records
+ info = obj.get_all(['lun_info'])
+ assert 'lun_info' in info
+ assert info['lun_info'] is None
+ # error
+
+
+def test_get_nothing():
+ '''calling with !all'''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ info = obj.get_all(['!all'])
+ print(info)
+ assert info == {'ontap_version': '0', 'ontapi_version': '0'}
+
+
+def test_deprecation_ontap_version():
+ '''calling ontap_version'''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ info = obj.get_all(['ontap_version'])
+ assert info
+ assert 'deprecation_warning' in info
+ assert info['deprecation_warning'] == 'ontap_version is deprecated, please use ontapi_version'
+
+
+def test_help():
+ '''calling help'''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ])
+ obj = create_module(my_module, DEFAULT_ARGS)
+ info = obj.get_all(['help'])
+ assert info
+ assert 'help' in info
+
+
+def test_desired_attributes():
+ '''desired_attributes option'''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'lun-get-iter', ZRR['success']),
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ])
+ module_args = {'desired_attributes': {'attr': 'value'}}
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ info = obj.get_all(['lun_info'])
+ assert 'lun_info' in info
+ assert info['lun_info'] is None
+ error = 'desired_attributes option is only supported with a single subset'
+ assert error in expect_and_capture_ansible_exception(obj.get_all, 'fail', ['ontapi_version', 'ontap_system_version'])['msg']
+
+
+def test_query():
+ '''query option'''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ])
+ module_args = {'query': {'attr': 'value', 'a_b': 'val'}}
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ info = obj.get_all(['ontapi_version'])
+ assert info == {'ontap_version': '0', 'ontapi_version': '0', 'module_warnings': ["Underscore in ZAPI tag: a_b, do you mean '-'?"]}
+ error = 'query option is only supported with a single subset'
+ assert error in expect_and_capture_ansible_exception(obj.get_all, 'fail', ['ontapi_version', 'ontap_system_version'])['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_info.NetAppONTAPGatherInfo.get_all')
+def test_get_all_with_summary(mock_get_all):
+ '''all and summary'''
+ register_responses([
+ ])
+ module_args = {'summary': True, 'gather_subset': None}
+ mock_get_all.return_value = {'a_info': {'1': '1.1'}, 'b_info': {'2': '2.2'}}
+ info = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert info
+ assert 'ontap_info' in info
+ assert info['ontap_info'] == {'a_info': {'1': None}.keys(), 'b_info': {'2': None}.keys()}
+
+
+def test_repeated_get():
+    '''test paginated get: responses carrying next-tag trigger additional lun-get-iter calls'''
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info_next_2']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info_next_3']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info_next_4']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ])
+ module_args = {'query': {}}
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ info = obj.get_all(['lun_info'])
+ assert info
+ assert 'lun_info' in info
+ assert len(info['lun_info']) == 4
+
+
+def test_repeated_get_error():
+    '''test error when next-tag is returned for an API that does not expect it'''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info_next_2']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ])
+ module_args = {'query': {}}
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "'next-tag' is not expected for this API"
+ assert error in expect_and_capture_ansible_exception(obj.call_api, 'fail', 'lun-get-iter', attributes_list_tag=None)['msg']
+
+
+def test_attribute_error():
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'license-v2-list-info', ZRR['no_records']),
+ ])
+ module_args = {'gather_subset': ['license_info'], 'vserver': 'svm'}
+ error = "Error: attribute 'licenses' not found for license-v2-list-info, got:"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_continue_on_error():
+ register_responses([
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'license-v2-list-info', ZRR['error']),
+ ('ZAPI', 'system-get-ontapi-version', ZRR['success']),
+ ('ZAPI', 'license-v2-list-info', ZRR['error']),
+ ])
+ module_args = {'gather_subset': ['license_info'], 'vserver': 'svm'}
+ error = zapi_error_message('Error calling API license-v2-list-info')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args = {'gather_subset': ['license_info'], 'vserver': 'svm', 'continue_on_error': 'always'}
+ info = call_main(my_main, DEFAULT_ARGS, module_args)
+ error = {'error': zapi_error_message('Error calling API license-v2-list-info')}
+ assert info is not None
+ assert info['ontap_info']['license_info'] == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_interface.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_interface.py
new file mode 100644
index 000000000..129caa635
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_interface.py
@@ -0,0 +1,1778 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module na_ontap_interface '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import copy
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings,\
+ assert_warning_was_raised, print_warnings, call_main, create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_interface \
+ import NetAppOntapInterface as interface_module, main as my_main
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+    pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def interface_info(dns=True, address='2.2.2.2', netmask='1.1.1.1'):
+ info = {
+ 'attributes-list': {
+ 'net-interface-info': {
+ 'interface-name': 'abc_if',
+ 'administrative-status': 'up',
+ 'failover-group': 'failover_group',
+ 'failover-policy': 'up',
+ 'firewall-policy': 'up',
+ 'is-auto-revert': 'true',
+ 'home-node': 'node',
+ 'current-node': 'node',
+ 'home-port': 'e0c',
+ 'current-port': 'e0c',
+ 'address': address,
+ 'netmask': netmask,
+ 'role': 'data',
+ 'listen-for-dns-query': 'true',
+ 'is-dns-update-enabled': 'true',
+ 'is-ipv4-link-local': 'false',
+ 'service-policy': 'service_policy',
+ }
+ }
+ }
+ if dns:
+ info['attributes-list']['net-interface-info']['dns-domain-name'] = 'test.com'
+ return info
+
+
+node_info = {
+ 'attributes-list': {
+ 'cluster-node-info': {
+ 'node-name': 'node_1',
+ }
+ }
+}
+
+
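+# canned ZAPI responses, including specific error codes used to exercise the module error paths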
+ZRR = zapi_responses({
+ 'interface_info': build_zapi_response(interface_info(), 1),
+ 'interface_ipv4': build_zapi_response(interface_info(address='10.10.10.13', netmask='255.255.255.0'), 1),
+ 'interface_info_no_dns': build_zapi_response(interface_info(dns=False), 1),
+ 'node_info': build_zapi_response(node_info, 1),
+ 'error_17': build_zapi_error(17, 'A LIF with the same name already exists'),
+ 'error_13003': build_zapi_error(13003, 'ZAPI is not enabled in pre-cluster mode.'),
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'password': 'password',
+ 'home_port': 'e0c',
+ 'interface_name': 'abc_if',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ module_args = {
+ 'vserver': 'vserver',
+ 'use_rest': 'never'
+ }
+ error = create_module(interface_module, module_args, fail=True)['msg']
+ assert 'missing required arguments:' in error
+ assert 'interface_name' in error
+
+
+def test_create_error_missing_param():
+    ''' Test create fails when required parameters are missing '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'home_node': 'node',
+ 'use_rest': 'never'
+ }
+ msg = 'Error: Missing one or more required parameters for creating interface:'
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg in error
+ assert 'role' in error
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'net-interface-create', ZRR['success'])
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'home_node': 'node',
+ 'role': 'data',
+ # 'subnet_name': 'subnet_name',
+ 'address': '10.10.10.13',
+ 'netmask': '255.255.255.0',
+ 'failover_policy': 'system-defined',
+ 'failover_group': 'failover_group',
+ 'firewall_policy': 'firewall_policy',
+ 'is_auto_revert': True,
+ 'admin_status': 'down',
+ 'force_subnet_association': True,
+ 'dns_domain_name': 'dns_domain_name',
+ 'listen_for_dns_query': True,
+ 'is_dns_update_enabled': True,
+ # 'is_ipv4_link_local': False,
+ 'service_policy': 'service_policy'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ip_subnet_cidr_mask():
+ ''' Test successful modify ip/subnet mask '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']),
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_ipv4']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'home_node': 'node',
+ 'role': 'data',
+ 'address': '10.10.10.13',
+ 'netmask': '24'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_create_for_NVMe():
+ ''' Test successful create for NVMe protocol'''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-node-get-iter', ZRR['node_info']),
+ ('ZAPI', 'net-interface-create', ZRR['success']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ # 'home_node': 'node',
+ 'role': 'data',
+ 'protocols': ['fc-nvme'],
+ 'subnet_name': 'subnet_name',
+ 'use_rest': 'never'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_idempotency_for_NVMe():
+    ''' Test create idempotency for NVMe protocol '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'home_node': 'node',
+ 'role': 'data',
+ 'protocols': ['fc-nvme'],
+ 'use_rest': 'never'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_error_for_NVMe():
+    ''' Test create errors when unsupported parameters are combined with data-protocol fc-nvme '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ])
+ msg = 'Error: Following parameters for creating interface are not supported for data-protocol fc-nvme:'
+ module_args = {
+ 'vserver': 'vserver',
+ 'protocols': ['fc-nvme'],
+ 'address': '1.1.1.1',
+ 'use_rest': 'never'
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg in error
+ for option in ('netmask', 'address', 'firewall_policy'):
+ assert option in error
+
+
+def test_create_idempotency():
+    ''' Test create idempotency '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'use_rest': 'never'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_delete():
+    ''' Test delete existing interface '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info_no_dns']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']), # offline
+ ('ZAPI', 'net-interface-delete', ZRR['success']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'vserver',
+ 'use_rest': 'never'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_idempotency():
+ ''' Test delete idempotency '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'vserver',
+ 'use_rest': 'never'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify():
+    ''' Test successful modify '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'dns_domain_name': 'test2.com',
+ 'home_port': 'e0d',
+ 'is_dns_update_enabled': False,
+ 'is_ipv4_link_local': True,
+ 'listen_for_dns_query': False,
+ 'use_rest': 'never'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_idempotency():
+ ''' Test modify idempotency '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'use_rest': 'never'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_message():
+ register_responses([
+ # create, missing params
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-node-get-iter', ZRR['no_records']),
+
+ # create - get home_node error
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-node-get-iter', ZRR['error']),
+
+ # create error
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-node-get-iter', ZRR['error_13003']),
+ ('ZAPI', 'net-interface-create', ZRR['error']),
+
+ # create error
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-node-get-iter', ZRR['no_records']),
+ ('ZAPI', 'net-interface-create', ZRR['error_17']),
+
+ # modify error
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-modify', ZRR['error']),
+
+ # rename error
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-rename', ZRR['error']),
+
+ # delete error
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']),
+ ('ZAPI', 'net-interface-delete', ZRR['error']),
+
+ # get error
+ ('ZAPI', 'net-interface-get-iter', ZRR['error']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'use_rest': 'never',
+ }
+ msg = 'Error: Missing one or more required parameters for creating interface:'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['home_port'] = 'e0d'
+ module_args['role'] = 'data'
+ module_args['address'] = '10.11.12.13'
+ module_args['netmask'] = '255.192.0.0'
+ msg = 'Error fetching node for interface abc_if: NetApp API failed. Reason - 12345:'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = 'Error Creating interface abc_if: NetApp API failed. Reason - 12345:'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ # LIF already exists (error 17)
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['home_port'] = 'new_port'
+ msg = 'Error modifying interface abc_if: NetApp API failed. Reason - 12345:'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['from_name'] = 'old_name'
+ msg = 'Error renaming old_name to abc_if: NetApp API failed. Reason - 12345:'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['state'] = 'absent'
+ msg = 'Error deleting interface abc_if: NetApp API failed. Reason - 12345:'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = 'Error fetching interface details for abc_if: NetApp API failed. Reason - 12345:'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successful_rename():
+    ''' Test successful rename '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-rename', ZRR['success']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'dns_domain_name': 'test2.com',
+ 'from_name': 'from_interface_name',
+ 'home_port': 'new_port',
+ 'is_dns_update_enabled': False,
+ 'listen_for_dns_query': False,
+ 'use_rest': 'never'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_rename_not_found():
+ ''' Test from interface not found '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ('ZAPI', 'net-interface-get-iter', ZRR['no_records']),
+ ])
+ msg = 'Error renaming interface abc_if: no interface with from_name from_interface_name.'
+ module_args = {
+ 'vserver': 'vserver',
+ 'dns_domain_name': 'test2.com',
+ 'from_name': 'from_interface_name',
+ 'home_port': 'new_port',
+ 'is_dns_update_enabled': False,
+ 'listen_for_dns_query': False,
+ 'use_rest': 'never'
+ }
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successful_migrate():
+    ''' Test successful migrate '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']),
+ ('ZAPI', 'net-interface-migrate', ZRR['success']),
+ ('ZAPI', 'net-interface-migrate', ZRR['success']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'dns_domain_name': 'test2.com',
+ 'current_node': 'new_node',
+ 'is_dns_update_enabled': False,
+ 'listen_for_dns_query': False,
+ 'use_rest': 'never'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_migrate():
+ ''' Test migrate errors '''
+ register_responses([
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']),
+
+ # 2nd try
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']),
+ ('ZAPI', 'net-interface-migrate', ZRR['error']),
+
+ # 3rd try
+ ('ZAPI', 'net-interface-get-iter', ZRR['interface_info']),
+ ('ZAPI', 'net-interface-modify', ZRR['success']),
+ ('ZAPI', 'net-interface-migrate', ZRR['success']),
+ ('ZAPI', 'net-interface-migrate', ZRR['error']),
+ ])
+ module_args = {
+ 'vserver': 'vserver',
+ 'dns_domain_name': 'test2.com',
+ 'current_port': 'new_port',
+ 'is_dns_update_enabled': False,
+ 'listen_for_dns_query': False,
+ 'use_rest': 'never'
+ }
+ msg = 'current_node must be set to migrate'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['current_node'] = 'new_node'
+ msg = 'Error migrating new_node: NetApp API failed. Reason - 12345'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = 'Error migrating new_node: NetApp API failed. Reason - 12345'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
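+# canned REST responses used by the REST test cases below: each key maps to a (status_code, body, error) tuple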
+SRR = rest_responses({
+ 'one_record_home_node': (200, {'records': [
+ {'name': 'node2_abc_if',
+ 'uuid': '54321',
+ 'enabled': True,
+ 'location': {'home_port': {'name': 'e0c'}, 'home_node': {'name': 'node2'}, 'node': {'name': 'node2'}, 'port': {'name': 'e0c'}}
+ }]}, None),
+ 'one_record_vserver': (200, {'records': [{
+ 'name': 'abc_if',
+ 'uuid': '54321',
+ 'svm': {'name': 'vserver', 'uuid': 'svm_uuid'},
+ 'dns_zone': 'netapp.com',
+ 'ddns_enabled': True,
+ 'data_protocol': ['nfs'],
+ 'enabled': True,
+ 'ip': {'address': '10.11.12.13', 'netmask': '10'},
+ 'location': {
+ 'home_port': {'name': 'e0c'},
+ 'home_node': {'name': 'node2'},
+ 'node': {'name': 'node2'},
+ 'port': {'name': 'e0c'},
+ 'auto_revert': True,
+ 'failover': True
+ },
+ 'service_policy': {'name': 'data-mgmt'},
+ 'probe_port': 65431
+ }]}, None),
+ 'one_record_vserver_subnet1': (200, {'records': [{
+ 'name': 'abc_if',
+ 'uuid': '54321',
+ 'svm': {'name': 'vserver', 'uuid': 'svm_uuid'},
+ 'dns_zone': 'netapp.com',
+ 'ddns_enabled': True,
+ 'data_protocol': ['nfs'],
+ 'enabled': True,
+ 'ip': {'address': '10.11.12.13', 'netmask': '10'},
+ 'location': {
+ 'home_port': {'name': 'e0c'},
+ 'home_node': {'name': 'node2'},
+ 'node': {'name': 'node2'},
+ 'port': {'name': 'e0c'},
+ 'auto_revert': True,
+ 'failover': True
+ },
+ 'service_policy': {'name': 'data-mgmt'},
+ 'subnet': {'name': 'subnet1'}
+ }]}, None),
+ 'one_record_fcp': (200, {'records': [{
+ 'data_protocol': 'fcp',
+ 'enabled': False,
+ 'location': {
+ 'home_node': {'name': 'ontap910-01', 'uuid': 'ecb4061b'},
+ 'home_port': {'name': '1a', 'node': {'name': 'ontap910-01'}, 'uuid': '1c9a72de'},
+ 'is_home': True,
+ 'node': {'name': 'ontap910-01', 'uuid': 'ecb4061b'},
+ 'port': {'name': '1a', 'node': {'name': 'ontap910-01'}, 'uuid': '1c9a72de'}
+ },
+ 'name': 'abc_if',
+ 'svm': {'name': 'svm0_iscsi', 'uuid': 'a59e775d'},
+ 'uuid': 'a3935ab5'
+ }]}, None),
+ 'two_records': (200, {'records': [{'name': 'node2_abc_if'}, {'name': 'node2_abc_if'}]}, None),
+ 'error_precluster': (500, None, {'message': 'are available in precluster.'}),
+ 'cluster_identity': (200, {'location': 'Oz', 'name': 'abc'}, None),
+ 'nodes': (200, {'records': [
+ {'name': 'node2', 'uuid': 'uuid2', 'cluster_interfaces': [{'ip': {'address': '10.10.10.2'}}]}
+ ]}, None),
+ 'nodes_two_records': (200, {'records': [
+ {'name': 'node2', 'uuid': 'uuid2', 'cluster_interfaces': [{'ip': {'address': '10.10.10.2'}}]},
+ {'name': 'node3', 'uuid': 'uuid2', 'cluster_interfaces': [{'ip': {'address': '10.10.10.2'}}]}
+ ]}, None),
+}, False)
+
+
+def test_rest_create_ip_no_svm():
+ ''' create IP interface without vserver '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ('POST', 'network/ip/interfaces', SRR['success']), # post
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_create_ip_no_svm_idempotent():
+ ''' create IP interface without vserver - idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_create_ip_no_svm_idempotent_localhost():
+ ''' create IP interface without vserver - idempotency with home_node localhost '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'home_node': 'localhost',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_create_ip_with_svm():
+ ''' create IP interface with vserver '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ('POST', 'network/ip/interfaces', SRR['success']), # post
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'vserver': 'vserver',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_create_fc_with_svm():
+ ''' create FC interface '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/fc/interfaces', SRR['zero_records']), # get FC
+ ('POST', 'network/fc/interfaces', SRR['success']), # post
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+ 'data_protocol': 'fc_nvme',
+ 'home_node': 'my_node',
+ 'protocols': 'fc-nvme'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_create_fc_with_svm_no_home_port():
+ ''' create FC interface '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/fc/interfaces', SRR['zero_records']), # get FC
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ('POST', 'network/fc/interfaces', SRR['success']), # post
+ ])
+ args = dict(DEFAULT_ARGS)
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+ 'data_protocol': 'fc_nvme',
+ 'protocols': 'fc-nvme',
+ 'current_port': args.pop('home_port'),
+ 'current_node': 'my_node',
+ }
+ assert call_main(my_main, args, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_create_ip_with_cluster_svm(dont_sleep):
+ ''' create IP interface, vserver is ignored for non data SVM '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ('POST', 'network/ip/interfaces', SRR['one_record_vserver']), # post
+ ('PATCH', 'network/ip/interfaces/54321', SRR['one_record_vserver']), # migrate
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver']), # get IP
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'admin_status': 'up',
+ 'current_port': 'e0c',
+ 'failover_scope': 'home_node_only',
+ 'ipspace': 'cluster',
+ 'vserver': 'vserver',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'role': 'intercluster',
+ 'probe_port': 65431,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised('Ignoring vserver with REST for non data SVM.')
+
+
+def test_rest_negative_create_ip():
+ ''' create IP interface error: cannot guess home_node '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ('GET', 'cluster/nodes', SRR['zero_records']), # get nodes
+ ])
+ msg = 'Error: Cannot guess home_node, home_node is required when home_port is present with REST.'
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ }
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_negative_create_ip_with_svm_no_home_port():
+ ''' create IP interface error: missing home_port, home_node and broadcast_domain '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ # ('POST', 'network/fc/interfaces', SRR['success']), # post
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('home_port')
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+ 'interface_type': 'ip',
+ }
+ error = "Error: At least one of 'broadcast_domain', 'home_port', 'home_node' is required to create an IP interface."
+ assert error in call_main(my_main, args, module_args, fail=True)['msg']
+
+
+def test_rest_negative_create_no_ip_address():
+ ''' create interface error: interface_type cannot be derived '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ])
+ msg = 'Error: Missing one or more required parameters for creating interface: interface_type.'
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ }
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_get_fc_no_svm():
+ ''' get FC interface error: a data vserver is required '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'interface_type': 'fc',
+ }
+ msg = "A data 'vserver' is required for FC interfaces."
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_negative_get_multiple_ip_if():
+ ''' get IP interface error: multiple records '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['two_records']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ])
+ msg = 'Error: multiple records for: node2_abc_if'
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ }
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_negative_get_multiple_fc_if():
+ ''' get FC interface error: multiple records '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ('GET', 'network/fc/interfaces', SRR['two_records']), # get FC
+ ])
+ msg = 'Error: unexpected records for name: abc_if, vserver: not_cluster'
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'not_cluster',
+ }
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_negative_get_multiple_ip_fc_if():
+ ''' get interface error: both an IP and an FC record match '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver']), # get IP
+ ('GET', 'network/fc/interfaces', SRR['one_record_vserver']), # get FC
+ ])
+ msg = 'Error fetching interface abc_if - found duplicate entries, please indicate interface_type.'
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'not_cluster',
+ }
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_modify_idempotent_ip_no_svm():
+ ''' modify IP interface - idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_modify_ip_no_svm():
+ ''' rename and modify IP interface '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'home_node': 'node2',
+ 'interface_name': 'new_name',
+ 'from_name': 'abc_if'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_modify_ip_svm():
+ ''' modify IP interface with vserver '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver']), # get IP
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'home_node': 'node1',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_migrate_ip_no_svm(sleep_mock):
+ ''' migrate IP interface to another node '''
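+ # build a GET response where the LIF reports node1 as its current node, used as the final poll result confirming the migration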
+ modified = copy.deepcopy(SRR['one_record_home_node'])
+ modified[1]['records'][0]['location']['node']['name'] = 'node1'
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes (for get)
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get - no change
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', modified),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'current_node': 'node1',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_migrate_ip_no_svm_port(sleep_mock):
+ ''' migrate IP interface to another port '''
+ modified = copy.deepcopy(SRR['one_record_home_node'])
+ modified[1]['records'][0]['location']['port']['name'] = 'port1'
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes (for get)
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get - no change
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', modified),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'current_port': 'port1',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_migrate_ip_svm(sleep_mock):
+ ''' migrate IP interface with vserver '''
+ modified = copy.deepcopy(SRR['one_record_home_node'])
+ modified[1]['records'][0]['location']['node']['name'] = 'node1'
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes (for get)
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', modified),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'current_node': 'node1',
+ 'vserver': 'vserver'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_migrate_ip_error(sleep_mock):
+ ''' migrate IP interface: errors while waiting for completion '''
+ modified = copy.deepcopy(SRR['one_record_home_node'])
+ modified[1]['records'][0]['location']['node']['name'] = 'node1'
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes (for get)
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
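+ # each PATCH re-issues the migration and the GET that follows polls the result; persistent GET errors make the module give up waiting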
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'current_node': 'node1',
+ 'vserver': 'vserver'
+ }
+ error = rest_error_message('Errors waiting for migration to complete', 'network/ip/interfaces')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_rest_migrate_ip_timeout(sleep_mock):
+ ''' migrate IP interface: timeout waiting for completion '''
+ modified = copy.deepcopy(SRR['one_record_home_node'])
+ modified[1]['records'][0]['location']['node']['name'] = 'node1'
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes (for get)
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
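+ # every poll still returns the original node, so the migration is never confirmed and the module warns after the timeout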
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'current_node': 'node1',
+ 'vserver': 'vserver'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_warning_was_raised('Failed to confirm interface is migrated after 120 seconds')
+
+
+def test_rest_create_migrate_fc_error():
+ ''' create and migrate FC interface errors '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/fc/interfaces', SRR['empty_records']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/fc/interfaces', SRR['one_record_fcp'])
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'home_node': 'ontap910-01',
+ 'current_node': 'ontap910-02',
+ 'current_port': '1b',
+ 'interface_type': 'fc',
+ 'vserver': 'svm0_iscsi'
+ }
+ error = 'Error: Missing one or more required parameters for creating interface'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['home_port'] = '1a'
+ error = 'Error: cannot migrate FC interface'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_delete_ip_no_svm():
+ ''' delete IP interface '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']), # get IP
+ ('GET', 'cluster/nodes', SRR['nodes']), # get nodes (for get)
+ ('DELETE', 'network/ip/interfaces/54321', SRR['success']), # delete
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'state': 'absent',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_disable_delete_fc():
+ ''' disable and delete FC interface '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/fc/interfaces', SRR['one_record_vserver']), # get IP
+ ('PATCH', 'network/fc/interfaces/54321', SRR['success']), # disable fc before delete
+ ('DELETE', 'network/fc/interfaces/54321', SRR['success']), # delete
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'state': 'absent',
+ "admin_status": "up",
+ "protocols": "fc-nvme",
+ "role": "data",
+ "vserver": "svm3",
+ "current_port": "1a"
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_delete_idempotent_ip_no_svm():
+ ''' delete IP interface - idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']), # get IP
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'address': '10.12.12.13',
+ 'netmask': '255.255.192.0',
+ 'state': 'absent',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_derive_fc_protocol_fcp():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['fcp'],
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ my_obj.derive_fc_data_protocol()
+ assert my_obj.parameters['data_protocol'] == 'fcp'
+
+
+def test_derive_fc_protocol_nvme():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['fc-nvme'],
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ my_obj.derive_fc_data_protocol()
+ assert my_obj.parameters['data_protocol'] == 'fc_nvme'
+
+
+def test_derive_fc_protocol_empty():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': [],
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ assert my_obj.derive_fc_data_protocol() is None
+
+
+def test_negative_derive_fc_protocol_nvme():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['fc-nvme', 'fcp'],
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ msg = "A single protocol entry is expected for FC interface, got ['fc-nvme', 'fcp']."
+ assert msg in expect_and_capture_ansible_exception(my_obj.derive_fc_data_protocol, 'fail')['msg']
+
+
+def test_negative_derive_fc_protocol_nvme_mismatch():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['fc-nvme'],
+ 'data_protocol': 'fcp'
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ msg = "Error: mismatch between configured data_protocol: fcp and data_protocols: ['fc-nvme']"
+ assert msg in expect_and_capture_ansible_exception(my_obj.derive_fc_data_protocol, 'fail')['msg']
+
+
+def test_negative_derive_fc_protocol_unexpected():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['fc-unknown'],
+ 'data_protocol': 'fcp'
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ msg = "Unexpected protocol value fc-unknown."
+ assert msg in expect_and_capture_ansible_exception(my_obj.derive_fc_data_protocol, 'fail')['msg']
+
+
+def test_derive_interface_type_nvme():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['fc-nvme'],
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ my_obj.derive_interface_type()
+ assert my_obj.parameters['interface_type'] == 'fc'
+
+
+def test_derive_interface_type_iscsi():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['iscsi'],
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ my_obj.derive_interface_type()
+ assert my_obj.parameters['interface_type'] == 'ip'
+
+
+def test_derive_interface_type_cluster():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'role': 'cluster',
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ my_obj.derive_interface_type()
+ assert my_obj.parameters['interface_type'] == 'ip'
+
+
+def test_negative_derive_interface_type_nvme_mismatch():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ msg = "Error: mismatch between configured interface_type: ip and derived interface_type: fc."
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['fc-nvme'],
+ 'interface_type': 'ip'
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ assert msg in expect_and_capture_ansible_exception(my_obj.derive_interface_type, 'fail')['msg']
+
+
+def test_negative_derive_interface_type_unknown():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ msg = "Error: unable to determine interface type, please set interface_type: unexpected value(s) for protocols: ['unexpected']"
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['unexpected'],
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ assert msg in expect_and_capture_ansible_exception(my_obj.derive_interface_type, 'fail')['msg']
+
+
+def test_negative_derive_interface_type_multiple():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ msg = "Error: unable to determine interface type, please set interface_type: incompatible value(s) for protocols: ['fc-nvme', 'cifs']"
+ module_args = {
+ 'use_rest': 'always',
+ 'protocols': ['fc-nvme', 'cifs'],
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ assert msg in expect_and_capture_ansible_exception(my_obj.derive_interface_type, 'fail')['msg']
+
+
+def test_derive_block_file_type_fcp():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ block_p, file_p, fcp = my_obj.derive_block_file_type(['fcp'])
+ assert block_p
+ assert not file_p
+ assert fcp
+ module_args['interface_type'] = 'fc'
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ block_p, file_p, fcp = my_obj.derive_block_file_type(None)
+ assert block_p
+ assert not file_p
+ assert fcp
+
+
+def test_derive_block_file_type_iscsi():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ block_p, file_p, fcp = my_obj.derive_block_file_type(['iscsi'])
+ assert block_p
+ assert not file_p
+ assert not fcp
+
+
+def test_derive_block_file_type_cifs():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ block_p, file_p, fcp = my_obj.derive_block_file_type(['cifs'])
+ assert not block_p
+ assert file_p
+ assert not fcp
+
+
+def test_derive_block_file_type_mixed():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ error = "Cannot use any of ['fcp'] with ['cifs']"
+ assert expect_and_capture_ansible_exception(my_obj.derive_block_file_type, 'fail', ['cifs', 'fcp'])['msg'] == error
+
+
+def test_map_failover_policy():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'failover_policy': 'local-only',
+ }
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ my_obj.map_failover_policy()
+ assert my_obj.parameters['failover_scope'] == 'home_node_only'
+
+
+def test_rest_negative_unsupported_zapi_option_fail():
+ ''' ZAPI-only option is rejected with use_rest: always '''
+ register_responses([
+ ])
+ msg = "REST API currently does not support 'is_ipv4_link_local'"
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'cluster',
+ 'is_ipv4_link_local': True,
+ }
+ assert msg in create_module(interface_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_negative_rest_only_option():
+ ''' REST-only option is rejected with use_rest: never '''
+ register_responses([
+ ])
+ msg = "probe_port requires REST."
+ module_args = {
+ 'use_rest': 'never',
+ 'ipspace': 'cluster',
+ 'probe_port': 65431,
+ }
+ assert msg in create_module(interface_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_negative_unsupported_zapi_option_force_zapi_1():
+ ''' ZAPI-only option forces ZAPI: vserver is then required '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ msg = "missing required argument with ZAPI: vserver"
+ module_args = {
+ 'use_rest': 'auto',
+ 'ipspace': 'cluster',
+ 'is_ipv4_link_local': True,
+ }
+ assert msg in create_module(interface_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_rest_negative_unsupported_zapi_option_force_zapi_2(mock_netapp_lib):
+ ''' ZAPI-only option forces ZAPI: netapp-lib is then required '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ mock_netapp_lib.return_value = False
+ msg = "the python NetApp-Lib module is required"
+ module_args = {
+ 'use_rest': 'auto',
+ 'ipspace': 'cluster',
+ 'is_ipv4_link_local': True,
+ }
+ assert msg in create_module(interface_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_negative_unsupported_rest_version():
+ ''' REST requires ONTAP 9.7 or later for interface APIs '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ msg = "Error: REST requires ONTAP 9.7 or later for interface APIs."
+ module_args = {'use_rest': 'always'}
+ assert msg == create_module(interface_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_auto_falls_back_to_zapi_if_ip_9_6():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ module_args = {'use_rest': 'auto'}
+ # vserver is a required parameter with ZAPI
+ msg = "missing required argument with ZAPI: vserver"
+ assert msg in create_module(interface_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print_warnings()
+ assert_warning_was_raised('Falling back to ZAPI: REST requires ONTAP 9.7 or later for interface APIs.')
+
+
+def test_fix_errors():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97'])
+ ])
+ module_args = {'use_rest': 'auto'}
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ control = {'xx': 11, 'yy': 22}
+ # no role in error
+ errors = dict(control)
+ assert my_obj.fix_errors(None, errors) is None
+ assert errors == control
+ # role/firewall_policy/protocols/service_policy -> service_policy
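+ # each tuple: (role, firewall_policy, protocols, service_policy, expected_service_policy, fixed)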
+ tests = [
+ ('data', 'data', ['nfs'], None, 'default-data-files', True),
+ ('data', 'data', ['cifs'], None, 'default-data-files', True),
+ ('data', 'data', ['iscsi'], None, 'default-data-blocks', True),
+ ('data', '', ['fc-nvme'], None, 'unchanged', True),
+ ('data', 'mgmt', ['ignored'], None, 'default-management', True),
+ ('data', '', ['nfs'], None, 'default-data-files', True),
+ ('data', '', ['cifs'], None, 'default-data-files', True),
+ ('data', '', ['iscsi'], None, 'default-data-blocks', True),
+ ('data', 'mgmt', ['ignored'], None, 'default-management', True),
+ ('intercluster', 'intercluster', ['ignored'], None, 'default-intercluster', True),
+ ('intercluster', '', ['ignored'], None, 'default-intercluster', True),
+ ('cluster', 'mgmt', ['ignored'], None, 'default-cluster', True),
+ ('cluster', '', ['ignored'], None, 'default-cluster', True),
+ ('cluster', 'other', ['ignored'], None, 'unchanged', False),
+ ]
+ for role, firewall_policy, protocols, service_policy, expected_service_policy, fixed in tests:
+ my_obj.parameters['protocols'] = protocols
+ if service_policy:
+ my_obj.parameters['service_policy'] = service_policy
+ options = {'service_policy': 'unchanged'}
+ errors = dict(control)
+ errors['role'] = role
+ if firewall_policy:
+ errors['firewall_policy'] = firewall_policy
+ assert my_obj.fix_errors(options, errors) is None
+ print('OPTIONS', options)
+ assert 'service_policy' in options
+ assert options['service_policy'] == expected_service_policy
+ assert errors == control or not fixed
+ assert fixed or 'role' in errors
+
+
+def test_error_messages_get_interface_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'network/ip/interfaces', SRR['two_records']), # get IP
+ ('GET', 'cluster/nodes', SRR['generic_error']), # get nodes
+ # second call
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver']), # get IP
+ ('GET', 'network/fc/interfaces', SRR['generic_error']), # get FC
+ # third call
+ ('GET', 'network/ip/interfaces', SRR['generic_error']), # get IP
+ ('GET', 'network/fc/interfaces', SRR['one_record_vserver']), # get FC
+ # fourth call
+ ('GET', 'network/ip/interfaces', SRR['generic_error']), # get IP
+ ('GET', 'network/fc/interfaces', SRR['generic_error']), # get FC
+ # fifth call
+ ('GET', 'network/ip/interfaces', SRR['error_precluster']), # get IP
+ ])
+ module_args = {'use_rest': 'auto'}
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ # first call
+ error = 'Error fetching cluster node info'
+ assert expect_and_capture_ansible_exception(my_obj.get_interface_rest, 'fail', 'my_lif')['msg'] == rest_error_message(error, 'cluster/nodes')
+ # second call
+ # reset value, as it was set for ip
+ del my_obj.parameters['interface_type']
+ my_obj.parameters['vserver'] = 'not_cluster'
+ assert my_obj.get_interface_rest('my_lif') is not None
+ # third call
+ # reset value, as it was set for ip
+ del my_obj.parameters['interface_type']
+ my_obj.parameters['vserver'] = 'not_cluster'
+ assert my_obj.get_interface_rest('my_lif') is not None
+ # fourth call
+ # reset value, as it was set for fc
+ del my_obj.parameters['interface_type']
+ error = expect_and_capture_ansible_exception(my_obj.get_interface_rest, 'fail', 'my_lif')['msg']
+ assert rest_error_message('Error fetching interface details for my_lif', 'network/ip/interfaces') in error
+ assert rest_error_message('', 'network/fc/interfaces') in error
+ # fifth call
+ error = 'This module cannot use REST in precluster mode, ZAPI can be forced with use_rest: never.'
+ assert error in expect_and_capture_ansible_exception(my_obj.get_interface_rest, 'fail', 'my_lif')['msg']
+
+
+def test_error_messages_rest_find_interface():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster/nodes', SRR['nodes_two_records']), # get nodes
+ ])
+ module_args = {'use_rest': 'auto'}
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ # no calls
+ # no interface type
+ error = 'Error: missing option "interface_type (or could not be derived)'
+ assert error in expect_and_capture_ansible_exception(my_obj.get_net_int_api, 'fail')['msg']
+ # multiple records for cluster
+ records = [
+ {'name': 'node_name'},
+ {'name': 'node_name'}
+ ]
+ error = 'Error: multiple records for: node_name - %s' % records
+ assert error in expect_and_capture_ansible_exception(my_obj.find_interface_record, 'fail', records, 'node', 'name')['msg']
+ # multiple records with vserver
+ records = [1, 2]
+ my_obj.parameters['vserver'] = 'vserver'
+ error = 'Error: unexpected records for name: name, vserver: vserver - [1, 2]'
+ assert error in expect_and_capture_ansible_exception(my_obj.find_exact_match, 'fail', records, 'name')['msg']
+ # multiple records with ambiguity, home_node set (warn)
+ del my_obj.parameters['vserver']
+ my_obj.parameters['home_node'] = 'node'
+ records = [
+ {'name': 'node_name'},
+ {'name': 'node_name'}
+ ]
+ error = 'Error: multiple records for: node_name - %s' % records
+ assert error in expect_and_capture_ansible_exception(my_obj.find_exact_match, 'fail', records, 'name')['msg']
+ records = [
+ {'name': 'node_name'},
+ {'name': 'name'}
+ ]
+ record = my_obj.find_exact_match(records, 'name')
+ assert record == {'name': 'node_name'}
+ assert_warning_was_raised("Found both ['name', 'node_name'], selecting node_name")
+ # fifth call (get nodes, cached)
+ # multiple records with different home nodes
+ del my_obj.parameters['home_node']
+ records = [
+ {'name': 'node2_name'},
+ {'name': 'node3_name'}
+ ]
+ error = "Error: multiple matches for name: name: ['node2_name', 'node3_name']. Set home_node parameter."
+ assert error in expect_and_capture_ansible_exception(my_obj.find_exact_match, 'fail', records, 'name')['msg']
+ # multiple records with home node and no home node
+ records = [
+ {'name': 'node2_name'},
+ {'name': 'name'}
+ ]
+ error = "Error: multiple matches for name: name: ['name', 'node2_name']. Set home_node parameter."
+ assert error in expect_and_capture_ansible_exception(my_obj.find_exact_match, 'fail', records, 'name')['msg']
+ # sixth call
+ error = "Error: multiple matches for name: name: ['name', 'node2_name']. Set home_node parameter."
+ assert error in expect_and_capture_ansible_exception(my_obj.find_exact_match, 'fail', records, 'name')['msg']
+
+
+def test_error_messages_rest_misc():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
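+ # the URLs below use 'type' because interface_type is forced to the dummy value 'type' later in this test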
+ ('POST', 'network/type/interfaces', SRR['generic_error']),
+ ('PATCH', 'network/type/interfaces/uuid', SRR['generic_error']),
+ ('DELETE', 'network/type/interfaces/uuid', SRR['generic_error']),
+ ])
+ module_args = {'use_rest': 'auto'}
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ # no calls
+ # no interface type
+ error = 'Error, expecting uuid in existing record'
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_payloads, 'fail', 'delete', {}, {})['msg']
+ my_obj.parameters['interface_type'] = 'type'
+ error = rest_error_message('Error creating interface abc_if', 'network/type/interfaces')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_interface_rest, 'fail', {})['msg']
+ error = rest_error_message('Error modifying interface abc_if', 'network/type/interfaces/uuid')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_interface_rest, 'fail', 'uuid', {'xxx': 'yyy'})['msg']
+ error = rest_error_message('Error deleting interface abc_if', 'network/type/interfaces/uuid')
+ assert error in expect_and_capture_ansible_exception(my_obj.delete_interface_rest, 'fail', 'uuid')['msg']
+
+
+def test_error_messages_build_rest_body_and_validations():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {'use_rest': 'always'}
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
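+ # walk through build_rest_body() validation errors, mutating parameters between calls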
+ my_obj.parameters['home_node'] = 'node1'
+ my_obj.parameters['protocols'] = ['nfs']
+ my_obj.parameters['role'] = 'intercluster'
+ error = 'Error: Missing one or more required parameters for creating interface: interface_type.'
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail')['msg']
+ my_obj.parameters['interface_type'] = 'type'
+ error = 'Error: unexpected value for interface_type: type.'
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail')['msg']
+ my_obj.parameters['interface_type'] = 'ip'
+ my_obj.parameters['ipspace'] = 'ipspace'
+ error = 'Error: Protocol cannot be specified for intercluster role, failed to create interface.'
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail')['msg']
+ del my_obj.parameters['protocols']
+ my_obj.parameters['interface_type'] = 'fc'
+ error = "Error: 'home_port' is not supported for FC interfaces with 9.7, use 'current_port', avoid home_node."
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail')['msg']
+ print_warnings()
+ assert_warning_was_raised("Avoid 'home_node' with FC interfaces with 9.7, use 'current_node'.")
+ del my_obj.parameters['home_port']
+ error = "Error: A data 'vserver' is required for FC interfaces."
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail')['msg']
+ my_obj.parameters['current_port'] = '0a'
+ my_obj.parameters['data_protocol'] = 'fc'
+ my_obj.parameters['force_subnet_association'] = True
+ my_obj.parameters['failover_group'] = 'failover_group'
+ my_obj.parameters['vserver'] = 'vserver'
+ error = "Error: 'role' is deprecated, and 'data' is the only value supported for FC interfaces: found intercluster."
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail')['msg']
+ my_obj.parameters['role'] = 'data'
+ error = "Error creating interface, unsupported options: {'failover_group': 'failover_group'}"
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail')['msg']
+ del my_obj.parameters['failover_group']
+ my_obj.parameters['broadcast_domain'] = 'BDD1'
+ error = "Error: broadcast_domain option only supported for IP interfaces: abc_if, interface_type: fc"
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail', None)['msg']
+ my_obj.parameters['service_policy'] = 'svc_pol'
+ error = "Error: 'service_policy' is not supported for FC interfaces."
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail', None)['msg']
+ del my_obj.parameters['service_policy']
+ my_obj.parameters['probe_port'] = 65431
+ error = "Error: 'probe_port' is not supported for FC interfaces."
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail', None)['msg']
+ print_warnings()
+ assert_warning_was_raised('Ignoring force_subnet_association')
+ my_obj.parameters['interface_type'] = 'ip'
+ del my_obj.parameters['vserver']
+ del my_obj.parameters['ipspace']
+ error = 'Error: ipspace name must be provided if scope is cluster, or vserver for svm scope.'
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail')['msg']
+ modify = {'ipspace': 'ipspace'}
+ error = "The following option cannot be modified: ipspace.name"
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail', modify)['msg']
+ del my_obj.parameters['role']
+ my_obj.parameters['current_port'] = 'port1'
+ my_obj.parameters['home_port'] = 'port1'
+ my_obj.parameters['ipspace'] = 'ipspace'
+ error = "Error: home_port and broadcast_domain are mutually exclusive for creating: abc_if"
+ assert error in expect_and_capture_ansible_exception(my_obj.build_rest_body, 'fail', None)['msg']
+
+
+def test_dns_domain_ddns_enabled():
+ ''' domain and ddns enabled option test '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']),
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ('POST', 'network/ip/interfaces', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver']),
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'network/fc/interfaces', SRR['zero_records']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'address': '10.11.12.13',
+ 'netmask': '255.192.0.0',
+ 'vserver': 'vserver',
+ 'dns_domain_name': 'netapp1.com',
+ 'is_dns_update_enabled': False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ del module_args['address']
+ del module_args['netmask']
+ args = {'data_protocol': 'fc_nvme', 'home_node': 'my_node', 'protocols': 'fc-nvme', 'interface_type': 'fc'}
+ module_args.update(args)
+ assert 'dns_domain_name, is_dns_update_enabled options only supported for IP interfaces' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Error: Minimum version of ONTAP for is_dns_update_enabled is (9, 9, 1).' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_subnet_name():
+ ''' subnet_name option test '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']),
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ('POST', 'network/ip/interfaces', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver_subnet1']),
+ # modify subnet
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver_subnet1']),
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ # error cases
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/fc/interfaces', SRR['zero_records']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'Default',
+ 'subnet_name': 'subnet1',
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['subnet_name'] = 'subnet2'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert 'Minimum version of ONTAP for subnet_name is (9, 11, 1)' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ args = {'data_protocol': 'fc_nvme', 'home_node': 'my_node', 'protocols': 'fc-nvme', 'interface_type': 'fc'}
+ module_args.update(args)
+ assert 'subnet_name option only supported for IP interfaces' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_warning_was_raised('ipspace is ignored for FC interfaces.')
+
+
+def test_fail_if_subnet_conflicts():
+ ''' fail_if_subnet_conflicts option test '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']),
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ('POST', 'network/ip/interfaces', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver']),
+ # modify subnet
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver']),
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ # error cases
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/fc/interfaces', SRR['zero_records']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'ipspace': 'Default',
+ 'fail_if_subnet_conflicts': False,
+ 'vserver': 'vserver',
+ 'address': '10.11.12.13',
+ 'netmask': '255.192.0.0',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['address'] = '10.11.12.14'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert 'Minimum version of ONTAP for fail_if_subnet_conflicts is (9, 11, 1)' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ args = {'data_protocol': 'fc_nvme', 'home_node': 'my_node', 'protocols': 'fc-nvme', 'interface_type': 'fc'}
+ module_args.update(args)
+ assert 'fail_if_subnet_conflicts option only supported for IP interfaces' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_warning_was_raised('ipspace is ignored for FC interfaces.')
+
+
+def check_options(my_obj, parameters, exp_options, exp_migrate_options, exp_errors):
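+ ''' helper: assert set_options_rest(parameters) returns the expected options, migrate options and errors '''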
+ options, migrate_options, errors = my_obj.set_options_rest(parameters)
+ assert options == exp_options
+ assert migrate_options == exp_migrate_options
+ assert errors == exp_errors
+
+
+def test_set_options_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ # ('GET', 'cluster/nodes', SRR['nodes']),
+ ])
+ module_args = {'use_rest': 'always'}
+ my_obj = create_module(interface_module, DEFAULT_ARGS, module_args)
+ parameters = None
+ my_obj.parameters = {
+ 'interface_type': 'other'
+ }
+ check_options(my_obj, parameters, {}, {}, {})
+ # unknown modify options
+ check_options(my_obj, {'x': 'y'}, {}, {}, {})
+ # valid options
+ my_obj.parameters = {
+ 'interface_type': 'ip',
+ 'fail_if_subnet_conflicts': False
+ }
+ check_options(my_obj, parameters, {'fail_if_subnet_conflicts': False}, {}, {})
+ check_options(my_obj, {'subnet_name': 'subnet1'}, {'subnet.name': 'subnet1'}, {}, {})
+ my_obj.parameters['home_node'] = 'node1'
+ check_options(my_obj, {'home_node': 'node1', 'home_port': 'port1'}, {'location': {'home_port': {'name': 'port1', 'node': {'name': 'node1'}}}}, {}, {})
+ my_obj.parameters['current_node'] = 'node1'
+ check_options(my_obj, {'current_node': 'node1', 'current_port': 'port1'}, {}, {'location': {'port': {'name': 'port1', 'node': {'name': 'node1'}}}}, {})
+
+
+def test_not_throw_warnings_in_rename():
+ ''' assert no warnings raised during rename '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'network/ip/interfaces', SRR['zero_records']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_vserver']),
+ ('GET', 'cluster/nodes', SRR['nodes']),
+ ('PATCH', 'network/ip/interfaces/54321', SRR['success']),
+ ])
+ module_args = {
+ "from_name": "abc_if",
+ "interface_name": "abc_if_update",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_no_warnings()
+
+
+def test_throw_warnings_modify_rename():
+ ''' assert warnings raised when interface_name does not have node name in it. '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'network/ip/interfaces', SRR['one_record_home_node']),
+ ('GET', 'cluster/nodes', SRR['nodes'])
+ ])
+ assert not call_main(my_main, DEFAULT_ARGS)['changed']
+ print_warnings()
+ # current record name is 'node2_abc_if' and interface_name does not have node name in it.
+ # adjust to avoid rename attempt.
+ assert_warning_was_raised('adjusting name from abc_if to node2_abc_if')
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ipspace.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ipspace.py
new file mode 100644
index 000000000..9a23f06b9
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ipspace.py
@@ -0,0 +1,189 @@
+# (c) 2018, NTT Europe Ltd.
+# (c) 2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit test for Ansible module: na_ontap_ipspace """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ipspace \
+ import NetAppOntapIpspace as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+DEFAULT_ARGS = {
+ "hostname": "10.10.10.10",
+ "username": "admin",
+ "password": "netapp1!",
+ "validate_certs": "no",
+ "https": "yes",
+ "state": "present",
+ "name": "test_ipspace"
+}
+
+
+ipspace_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'net-ipspaces-info': {
+ 'ipspace': 'test_ipspace'
+ }
+ }
+}
+
+ipspace_info_renamed = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'net-ipspaces-info': {
+ 'ipspace': 'test_ipspace_renamed'
+ }
+ }
+}
+
+ZRR = zapi_responses({
+ 'ipspace_info': build_zapi_response(ipspace_info),
+ 'ipspace_info_renamed': build_zapi_response(ipspace_info_renamed),
+})
+
+SRR = rest_responses({
+ 'ipspace_record': (200, {'records': [{
+ "name": "test_ipspace",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"}]}, None),
+ 'ipspace_record_renamed': (200, {'records': [{
+ "name": "test_ipspace_renamed",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"}]}, None)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ msg = 'missing required arguments:'
+ assert msg in exc.value.args[0]['msg']
+
+
+def test_get_ipspace_iscalled():
+ ''' test if get_ipspace() is called '''
+ register_responses([
+ ('net-ipspaces-get-iter', ZRR['empty'])
+ ])
+ ipspace_obj = create_module(my_module, DEFAULT_ARGS, {'use_rest': 'never'})
+ result = ipspace_obj.get_ipspace('dummy')
+ assert result is None
+
+
+def test_ipspace_apply_iscalled():
+ ''' test if apply() is called - create, rename and delete '''
+ register_responses([
+ # create
+ ('net-ipspaces-get-iter', ZRR['empty']),
+ ('net-ipspaces-create', ZRR['success']),
+ # create idempotent check
+ ('net-ipspaces-get-iter', ZRR['ipspace_info']),
+ # rename
+ ('net-ipspaces-get-iter', ZRR['empty']),
+ ('net-ipspaces-get-iter', ZRR['ipspace_info']),
+ ('net-ipspaces-rename', ZRR['success']),
+ # rename idempotent check
+ ('net-ipspaces-get-iter', ZRR['ipspace_info_renamed']),
+ # delete
+ ('net-ipspaces-get-iter', ZRR['ipspace_info']),
+ ('net-ipspaces-destroy', ZRR['success'])
+ ])
+ args = {'use_rest': 'never'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ args['from_name'] = 'test_ipspace'
+ args['name'] = 'test_ipspace_renamed'
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ args = {'use_rest': 'never', 'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_create_rest():
+ ''' Test successful create and idempotent check'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'network/ipspaces', SRR['empty_records']),
+ ('POST', 'network/ipspaces', SRR['success']),
+ # idempotent
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'network/ipspaces', SRR['ipspace_record'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_successful_delete_rest():
+ ''' Test successful delete and idempotent check'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'network/ipspaces', SRR['ipspace_record']),
+ ('DELETE', 'network/ipspaces/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ # idempotent
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'network/ipspaces', SRR['empty_records'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_successful_rename_rest():
+ ''' Test successful rename and idempotent check'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'network/ipspaces', SRR['empty_records']),
+ ('GET', 'network/ipspaces', SRR['ipspace_record']),
+ ('PATCH', 'network/ipspaces/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ # idempotent
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'network/ipspaces', SRR['ipspace_record_renamed'])
+ ])
+ args = {'from_name': 'test_ipspace', 'name': 'test_ipspace_renamed'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_if_all_methods_catch_exception_zapi_rest():
+ register_responses([
+ # zapi
+ ('net-ipspaces-get-iter', ZRR['error']),
+ ('net-ipspaces-create', ZRR['error']),
+ ('net-ipspaces-rename', ZRR['error']),
+ ('net-ipspaces-destroy', ZRR['error']),
+ # REST
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'network/ipspaces', SRR['generic_error']),
+ ('POST', 'network/ipspaces', SRR['generic_error']),
+ ('PATCH', 'network/ipspaces/abdcdef', SRR['generic_error']),
+ ('DELETE', 'network/ipspaces/abdcdef', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS, {'from_name': 'test_ipspace_rename', 'use_rest': 'never'})
+ assert 'Error getting ipspace' in expect_and_capture_ansible_exception(my_obj.get_ipspace, 'fail')['msg']
+ assert 'Error provisioning ipspace' in expect_and_capture_ansible_exception(my_obj.create_ipspace, 'fail')['msg']
+ assert 'Error renaming ipspace' in expect_and_capture_ansible_exception(my_obj.rename_ipspace, 'fail')['msg']
+ assert 'Error removing ipspace' in expect_and_capture_ansible_exception(my_obj.delete_ipspace, 'fail')['msg']
+
+ my_obj = create_module(my_module, DEFAULT_ARGS, {'from_name': 'test_ipspace_rename'})
+ my_obj.uuid = 'abdcdef'
+ assert 'Error getting ipspace' in expect_and_capture_ansible_exception(my_obj.get_ipspace, 'fail')['msg']
+ assert 'Error provisioning ipspace' in expect_and_capture_ansible_exception(my_obj.create_ipspace, 'fail')['msg']
+ assert 'Error renaming ipspace' in expect_and_capture_ansible_exception(my_obj.rename_ipspace, 'fail')['msg']
+ assert 'Error removing ipspace' in expect_and_capture_ansible_exception(my_obj.delete_ipspace, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_iscsi.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_iscsi.py
new file mode 100644
index 000000000..4d0a53fda
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_iscsi.py
@@ -0,0 +1,339 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_iscsi '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_iscsi \
+ import NetAppOntapISCSI as iscsi_module # module under test
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ "hostname": "10.10.10.10",
+ "username": "admin",
+ "password": "netapp1!",
+ "validate_certs": "no",
+ "https": "yes",
+ "state": "present",
+ "use_rest": "never",
+ "vserver": "svm1",
+ "service_state": "started"
+}
+
+
+iscsi_info_started = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'iscsi-service-info': {
+ 'is-available': 'true',
+ 'vserver': 'svm1'
+ }
+ }
+}
+
+iscsi_info_stopped = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'iscsi-service-info': {
+ 'is-available': 'false',
+ 'vserver': 'svm1'
+ }
+ }
+}
+
+ZRR = zapi_responses({
+ 'iscsi_started': build_zapi_response(iscsi_info_started),
+ 'iscsi_stopped': build_zapi_response(iscsi_info_stopped)
+})
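+
+# In the ZAPI fixtures above, 'is-available' is what the module maps to
+# service_state: 'true' corresponds to 'started' and 'false' to 'stopped'
+# (inferred from the fixture names and the start/stop tests below).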
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ iscsi_module()
+    print('Info: %s' % exc.value.args[0]['msg'])
+    assert 'missing required arguments:' in exc.value.args[0]['msg']
+
+
+def test_get_nonexistent_iscsi():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['empty'])
+ ])
+ iscsi_obj = create_module(iscsi_module, DEFAULT_ARGS)
+ result = iscsi_obj.get_iscsi()
+ assert not result
+
+
+def test_get_existing_iscsi():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['iscsi_started'])
+ ])
+ iscsi_obj = create_module(iscsi_module, DEFAULT_ARGS)
+ result = iscsi_obj.get_iscsi()
+ assert result
+
+
+def test_successfully_create():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['empty']),
+ ('iscsi-service-create', ZRR['success'])
+ ])
+ assert create_and_apply(iscsi_module, DEFAULT_ARGS)['changed']
+
+
+def test_create_idempotency():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['iscsi_started'])
+ ])
+ assert create_and_apply(iscsi_module, DEFAULT_ARGS)['changed'] is False
+
+
+def test_successfully_create_stop_service():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['empty']),
+ ('iscsi-service-create', ZRR['success'])
+ ])
+ args = {'service_state': 'stopped'}
+ assert create_and_apply(iscsi_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successfully_delete_when_service_started():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['iscsi_started']),
+ ('iscsi-service-stop', ZRR['success']),
+ ('iscsi-service-destroy', ZRR['success'])
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(iscsi_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_idempotent():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['empty'])
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(iscsi_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_start_iscsi():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['iscsi_stopped']),
+ ('iscsi-service-start', ZRR['success'])
+ ])
+ assert create_and_apply(iscsi_module, DEFAULT_ARGS)['changed']
+
+
+def test_stop_iscsi():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['iscsi_started']),
+ ('iscsi-service-stop', ZRR['success'])
+ ])
+ args = {'service_state': 'stopped'}
+ assert create_and_apply(iscsi_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('iscsi-service-get-iter', ZRR['error']),
+ ('iscsi-service-create', ZRR['error']),
+ ('iscsi-service-start', ZRR['error']),
+ ('iscsi-service-stop', ZRR['error']),
+ ('iscsi-service-destroy', ZRR['error'])
+ ])
+
+ iscsi_obj = create_module(iscsi_module, DEFAULT_ARGS)
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.get_iscsi, 'fail')['msg']
+ assert 'Error finding iscsi service in svm1: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.create_iscsi_service, 'fail')['msg']
+ assert 'Error creating iscsi service: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.start_iscsi_service, 'fail')['msg']
+ assert 'Error starting iscsi service on vserver svm1: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.stop_iscsi_service, 'fail')['msg']
+ assert 'Error Stopping iscsi service on vserver svm1: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.delete_iscsi_service, 'fail', {'service_state': 'stopped'})['msg']
+ assert 'Error deleting iscsi service on vserver svm1: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+
+SRR = rest_responses({
+ 'iscsi_started': (200, {"records": [
+ {
+ "svm": {"uuid": "d08434fae1-a8a8-11fg-aa26-005055fhs3e5"},
+ "enabled": True,
+ 'target': {'alias': 'ansibleSVM'}
+ }
+ ], "num_records": 1}, None),
+ 'iscsi_record': (200, {"records": [
+ {
+ "svm": {"uuid": "d08434fae1-a8a8-11fg-aa26-005055fhs3e5"},
+ "enabled": True,
+ 'target': {'alias': 'ansibleSVM'}
+ }
+ ], "num_records": 1}, None),
+ 'iscsi_stopped': (200, {"records": [
+ {
+ "svm": {"uuid": "d08434fae1-a8a8-11fg-aa26-005055fhs3e5"},
+ "enabled": False,
+ 'target': {'alias': 'ansibleSVM'}
+ }
+ ], "num_records": 1}, None),
+})
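+
+# In the REST fixtures, 'enabled' plays the role 'is-available' plays in ZAPI,
+# and the svm uuid in the record is the key used in the PATCH/DELETE URLs
+# exercised by the REST tests below.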
+
+
+ARGS_REST = {
+ "hostname": "10.10.10.10",
+ "username": "admin",
+ "password": "netapp1!",
+ "validate_certs": "no",
+ "https": "yes",
+ "state": "present",
+ "use_rest": "always",
+ "vserver": "svm1",
+ "service_state": "started",
+ "target_alias": "ansibleSVM"
+}
+
+
+def test_successfully_create_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['empty_records']),
+ ('POST', 'protocols/san/iscsi/services', SRR['success'])
+ ])
+ assert create_and_apply(iscsi_module, ARGS_REST, {'use_rest': 'always'})['changed']
+
+
+def test_create_idempotency_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['iscsi_started']),
+ ])
+ assert create_and_apply(iscsi_module, ARGS_REST, {'use_rest': 'always'})['changed'] is False
+
+
+def test_successfully_create_stop_service_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['empty_records']),
+ ('POST', 'protocols/san/iscsi/services', SRR['success'])
+ ])
+ args = {'service_state': 'stopped'}
+ assert create_and_apply(iscsi_module, ARGS_REST, args)['changed']
+
+
+def test_successfully_delete_when_service_started_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['iscsi_started']),
+ ('PATCH', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['success']),
+ ('DELETE', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['success']),
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(iscsi_module, ARGS_REST, args)['changed']
+
+
+def test_delete_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['empty_records']),
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(iscsi_module, ARGS_REST, args)['changed'] is False
+
+
+def test_start_iscsi_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['iscsi_stopped']),
+ ('PATCH', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['success']),
+ ])
+ args = {'service_state': 'started'}
+ assert create_and_apply(iscsi_module, ARGS_REST, args)['changed']
+
+
+def test_modify_iscsi_target_alias_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['iscsi_started']),
+ ('PATCH', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['success']),
+ ])
+ args = {"target_alias": "ansibleSVM_test"}
+ assert create_and_apply(iscsi_module, ARGS_REST, args)['changed']
+
+
+def test_modify_iscsi_target_alias_and_state_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['iscsi_stopped']),
+ ('PATCH', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['success']),
+ ('PATCH', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['success']),
+ ])
+ args = {"target_alias": "ansibleSVM_test", 'service_state': 'started'}
+ assert create_and_apply(iscsi_module, ARGS_REST, args)['changed']
+
+
+def test_stop_iscsi_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['iscsi_started']),
+ ('PATCH', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['success']),
+ ])
+ args = {'service_state': 'stopped'}
+ assert create_and_apply(iscsi_module, ARGS_REST, args)['changed']
+
+
+def test_if_all_methods_catch_exception_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/san/iscsi/services', SRR['generic_error']),
+ ('POST', 'protocols/san/iscsi/services', SRR['generic_error']),
+ ('PATCH', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['generic_error']),
+ ('PATCH', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['generic_error']),
+ ('DELETE', 'protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5', SRR['generic_error'])
+ ])
+
+ iscsi_obj = create_module(iscsi_module, ARGS_REST, {'use_rest': 'always'})
+ iscsi_obj.uuid = "d08434fae1-a8a8-11fg-aa26-005055fhs3e5"
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.get_iscsi_rest, 'fail')['msg']
+ msg = 'Error finding iscsi service in svm1: calling: protocols/san/iscsi/services: got Expected error.'
+ assert msg in error
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.create_iscsi_service_rest, 'fail')['msg']
+ msg = 'Error creating iscsi service: calling: protocols/san/iscsi/services: got Expected error.'
+ assert msg in error
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.start_or_stop_iscsi_service_rest, 'fail', 'started')['msg']
+ msg = 'Error starting iscsi service on vserver svm1: calling: protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5: got Expected error.'
+ assert msg in error
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.start_or_stop_iscsi_service_rest, 'fail', 'stopped')['msg']
+ msg = 'Error stopping iscsi service on vserver svm1: calling: protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5: got Expected error.'
+ assert msg in error
+
+ error = expect_and_capture_ansible_exception(iscsi_obj.delete_iscsi_service_rest, 'fail', {'service_state': 'stopped'})['msg']
+ msg = 'Error deleting iscsi service on vserver svm1: calling: protocols/san/iscsi/services/d08434fae1-a8a8-11fg-aa26-005055fhs3e5: got Expected error.'
+ assert msg in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_iscsi_security.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_iscsi_security.py
new file mode 100644
index 000000000..4cc168f2e
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_iscsi_security.py
@@ -0,0 +1,195 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_iscsi_security '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, \
+ create_and_apply, create_module, expect_and_capture_ansible_exception, call_main, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, \
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_iscsi_security \
+ import NetAppONTAPIscsiSecurity as iscsi_object, main as iscsi_module_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'get_uuid': (200, {"records": [{"uuid": "e2e89ccc-db35-11e9"}]}, None),
+ 'get_initiator': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "e2e89ccc-db35-11e9",
+ "name": "test_ansible"
+ },
+ "initiator": "eui.0123456789abcdef",
+ "authentication_type": "chap",
+ "chap": {
+ "inbound": {
+ "user": "test_user_1"
+ },
+ "outbound": {
+ "user": "test_user_2"
+ }
+ },
+ "initiator_address": {
+ "ranges": [
+ {
+ "start": "10.125.10.0",
+ "end": "10.125.10.10",
+ "family": "ipv4"
+ },
+ {
+ "start": "10.10.10.7",
+ "end": "10.10.10.7",
+ "family": "ipv4"
+ }
+ ]
+ }
+ }], "num_records": 1}, None),
+ 'get_initiator_no_user': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "e2e89ccc-db35-11e9",
+ "name": "test_ansible"
+ },
+ "initiator": "eui.0123456789abcdef",
+ "authentication_type": "chap",
+ "chap": {
+ },
+ "initiator_address": {
+ "ranges": [
+ ]
+ }
+ }], "num_records": 1}, None),
+ 'get_initiator_none': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "e2e89ccc-db35-11e9",
+ "name": "test_ansible"
+ },
+ "initiator": "eui.0123456789abcdef",
+ "authentication_type": "none"
+ }], "num_records": 1}, None),
+})
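+
+# The REST tests below follow a two-step lookup: the svm uuid is resolved first
+# via 'svm/svms', then credentials are addressed as
+# 'protocols/san/iscsi/credentials/<svm_uuid>/<initiator>' in PATCH/DELETE calls.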
+
+
+DEFAULT_ARGS = {
+ 'initiator': "eui.0123456789abcdef",
+ 'inbound_username': "test_user_1",
+ 'inbound_password': "123",
+ 'outbound_username': "test_user_2",
+ 'outbound_password': "321",
+ 'auth_type': "chap",
+ 'address_ranges': ["10.125.10.0-10.125.10.10", "10.10.10.7"],
+ 'hostname': 'test',
+ 'vserver': 'test_vserver',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+}
+
+
+def test_rest_successful_create():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['get_uuid']),
+ ('GET', 'protocols/san/iscsi/credentials', SRR['zero_records']),
+ ('POST', 'protocols/san/iscsi/credentials', SRR['success']),
+ # idempotent check
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['get_uuid']),
+ ('GET', 'protocols/san/iscsi/credentials', SRR['get_initiator']),
+ ])
+ assert create_and_apply(iscsi_object, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(iscsi_object, DEFAULT_ARGS)['changed']
+
+
+def test_rest_successful_modify_address():
+ '''Test successful rest modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['get_uuid']),
+ ('GET', 'protocols/san/iscsi/credentials', SRR['get_initiator']),
+ ('PATCH', 'protocols/san/iscsi/credentials/e2e89ccc-db35-11e9/eui.0123456789abcdef', SRR['success'])
+ ])
+ args = {'address_ranges': ['10.10.10.8']}
+ assert create_and_apply(iscsi_object, DEFAULT_ARGS, args)['changed']
+
+
+def test_rest_successful_modify_inbound_user():
+ '''Test successful rest modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['get_uuid']),
+ ('GET', 'protocols/san/iscsi/credentials', SRR['get_initiator']),
+ ('PATCH', 'protocols/san/iscsi/credentials/e2e89ccc-db35-11e9/eui.0123456789abcdef', SRR['success'])
+ ])
+ args = {'inbound_username': 'test_user_3'}
+ assert create_and_apply(iscsi_object, DEFAULT_ARGS, args)['changed']
+
+
+def test_rest_successful_modify_outbound_user():
+ '''Test successful rest modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['get_uuid']),
+ ('GET', 'protocols/san/iscsi/credentials', SRR['get_initiator']),
+ ('PATCH', 'protocols/san/iscsi/credentials/e2e89ccc-db35-11e9/eui.0123456789abcdef', SRR['success'])
+ ])
+ args = {'outbound_username': 'test_user_3'}
+ assert create_and_apply(iscsi_object, DEFAULT_ARGS, args)['changed']
+
+
+def test_rest_successful_modify_chap_no_user():
+ '''Test successful rest modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['get_uuid']),
+ ('GET', 'protocols/san/iscsi/credentials', SRR['get_initiator_no_user']),
+ ('PATCH', 'protocols/san/iscsi/credentials/e2e89ccc-db35-11e9/eui.0123456789abcdef', SRR['success'])
+ ])
+ assert create_and_apply(iscsi_object, DEFAULT_ARGS)['changed']
+
+
+def test_rest_successful_modify_chap():
+ '''Test successful rest modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['get_uuid']),
+ ('GET', 'protocols/san/iscsi/credentials', SRR['get_initiator_none']),
+ ('PATCH', 'protocols/san/iscsi/credentials/e2e89ccc-db35-11e9/eui.0123456789abcdef', SRR['success'])
+ ])
+ assert call_main(iscsi_module_main, DEFAULT_ARGS)['changed']
+
+
+def test_all_methods_catch_exception():
+ ''' test exception in get/create/modify/delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['get_uuid']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ('GET', 'svm/svms', SRR['empty_records']),
+ # GET/POST/PATCH error.
+ ('GET', 'protocols/san/iscsi/credentials', SRR['generic_error']),
+ ('POST', 'protocols/san/iscsi/credentials', SRR['generic_error']),
+ ('PATCH', 'protocols/san/iscsi/credentials/e2e89ccc-db35-11e9/eui.0123456789abcdef', SRR['generic_error']),
+ ('DELETE', 'protocols/san/iscsi/credentials/e2e89ccc-db35-11e9/eui.0123456789abcdef', SRR['generic_error'])
+ ])
+ sec_obj = create_module(iscsi_object, DEFAULT_ARGS)
+ assert 'Error on fetching svm uuid' in expect_and_capture_ansible_exception(sec_obj.get_svm_uuid, 'fail')['msg']
+ assert 'Error on fetching svm uuid, SVM not found' in expect_and_capture_ansible_exception(sec_obj.get_svm_uuid, 'fail')['msg']
+ assert 'Error on fetching initiator' in expect_and_capture_ansible_exception(sec_obj.get_initiator, 'fail')['msg']
+ assert 'Error on creating initiator' in expect_and_capture_ansible_exception(sec_obj.create_initiator, 'fail')['msg']
+ assert 'Error on modifying initiator' in expect_and_capture_ansible_exception(sec_obj.modify_initiator, 'fail', {}, {})['msg']
+ assert 'Error on deleting initiator' in expect_and_capture_ansible_exception(sec_obj.delete_initiator, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_job_schedule.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_job_schedule.py
new file mode 100644
index 000000000..4ccec5115
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_job_schedule.py
@@ -0,0 +1,451 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_job_schedule '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_job_schedule \
+ import NetAppONTAPJob as job_module, main as uut_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'name': 'test_job',
+ 'job_minutes': [25],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+}
+
+
+cron_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'job-schedule-cron-info': {
+ 'job-schedule-cluster': 'cluster1',
+ 'job-schedule-name': 'test_job',
+ 'job-schedule-cron-minute': {'cron-minute': 25}
+ }
+ }
+}
+
+
+multiple_cron_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'job-schedule-cron-info': {
+ 'job-schedule-cluster': 'cluster1',
+ 'job-schedule-name': 'test_job',
+ 'job-schedule-cron-minute': [
+ {'cron-minute': '25'},
+ {'cron-minute': '35'}
+ ],
+ 'job-schedule-cron-month': [
+ {'cron-month': '5'},
+ {'cron-month': '10'}
+ ]
+ }
+ }
+}
+
+
+multiple_cron_minutes_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'job-schedule-cron-info': {
+ 'job-schedule-cluster': 'cluster1',
+ 'job-schedule-name': 'test_job',
+ 'job-schedule-cron-minute': [{'cron-minute': str(x)} for x in range(60)],
+ 'job-schedule-cron-month': [
+ {'cron-month': '5'},
+ {'cron-month': '10'}
+ ]
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'cron_info': build_zapi_response(cron_info),
+ 'multiple_cron_info': build_zapi_response(multiple_cron_info),
+ 'multiple_cron_minutes_info': build_zapi_response(multiple_cron_minutes_info)
+})
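+
+# multiple_cron_minutes_info lists all 60 cron minutes; the module is expected
+# to report that as job_minutes == [-1] (see test_zapi_get_all_minutes below).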
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors
+ with python 2.6, dictionaries are not ordered
+ '''
+ fragments = ["missing required arguments:", "hostname", "name"]
+ error = create_module(job_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_get_nonexistent_job():
+ ''' Test if get_job_schedule returns None for non-existent job '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['no_records'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS)
+ assert job_obj.get_job_schedule() is None
+
+
+def test_get_existing_job():
+    ''' Test if get_job_schedule returns job details for an existing job '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['cron_info'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS)
+ result = job_obj.get_job_schedule()
+ assert result['name'] == DEFAULT_ARGS['name']
+ assert result['job_minutes'] == DEFAULT_ARGS['job_minutes']
+
+
+def test_get_existing_job_multiple_minutes():
+ # sourcery skip: class-extract-method
+    ''' Test if get_job_schedule returns job details when multiple cron minutes and months are set '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['multiple_cron_info'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS)
+ result = job_obj.get_job_schedule()
+ assert result['name'] == DEFAULT_ARGS['name']
+ assert result['job_minutes'] == [25, 35]
+ assert result['job_months'] == [5, 10]
+
+
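+# month_offset convention exercised by the following tests (derived from the
+# fixture values and asserts in this file): ZAPI reports cron months 0-based
+# while REST reports them 1-based, so with month_offset=1 the module adds one to
+# the ZAPI values ([5, 10] -> [6, 11]) and keeps the REST values as-is, while
+# with month_offset=0 it keeps the ZAPI values and subtracts one from REST.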
+def test_get_existing_job_multiple_minutes_0_offset():
+    ''' Test get_job_schedule with month_offset set to 0 '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['multiple_cron_info'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS, {'month_offset': 0})
+ result = job_obj.get_job_schedule()
+ assert result['name'] == DEFAULT_ARGS['name']
+ assert result['job_minutes'] == [25, 35]
+ assert result['job_months'] == [5, 10]
+
+
+def test_get_existing_job_multiple_minutes_1_offset():
+    ''' Test get_job_schedule with month_offset set to 1 '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['multiple_cron_info'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS, {'month_offset': 1})
+ result = job_obj.get_job_schedule()
+ assert result['name'] == DEFAULT_ARGS['name']
+ assert result['job_minutes'] == [25, 35]
+ assert result['job_months'] == [5 + 1, 10 + 1]
+
+
+def test_create_error_missing_param():
+ ''' Test if create throws an error if job_minutes is not specified'''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['no_records'])
+ ])
+ args = DEFAULT_ARGS.copy()
+ del args['job_minutes']
+ error = 'Error: missing required parameter job_minutes for create'
+ assert error in create_and_apply(job_module, args, fail=True)['msg']
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['no_records']),
+ ('job-schedule-cron-create', ZRR['success'])
+ ])
+ assert create_and_apply(job_module, DEFAULT_ARGS)['changed']
+
+
+def test_successful_create_0_offset():
+ ''' Test successful create '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['no_records']),
+ ('job-schedule-cron-create', ZRR['success'])
+ ])
+ args = {'month_offset': 0, 'job_months': [0, 8]}
+ assert create_and_apply(job_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_create_1_offset():
+ ''' Test successful create '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['no_records']),
+ ('job-schedule-cron-create', ZRR['success'])
+ ])
+ args = {'month_offset': 1, 'job_months': [1, 9], 'cluster': 'cluster1'}
+ assert create_and_apply(job_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_create_idempotency():
+ ''' Test create idempotency '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['cron_info'])
+ ])
+ assert not create_and_apply(job_module, DEFAULT_ARGS)['changed']
+
+
+def test_successful_delete():
+ ''' Test delete existing job '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['cron_info']),
+ ('job-schedule-cron-destroy', ZRR['success'])
+ ])
+ assert create_and_apply(job_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_delete_idempotency():
+ ''' Test delete idempotency '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['no_records'])
+ ])
+ assert not create_and_apply(job_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_successful_modify():
+ ''' Test successful modify job_minutes '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['cron_info']),
+ ('job-schedule-cron-modify', ZRR['success'])
+ ])
+ assert create_and_apply(job_module, DEFAULT_ARGS, {'job_minutes': '20'})['changed']
+
+
+def test_modify_idempotency():
+ ''' Test modify idempotency '''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['cron_info'])
+ ])
+ assert not create_and_apply(job_module, DEFAULT_ARGS)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_negative_no_netapp_lib(mock_has):
+ mock_has.return_value = False
+ error = 'the python NetApp-Lib module is required'
+ assert error in create_module(job_module, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_zapi_get_all_minutes():
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['multiple_cron_minutes_info'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS)
+ schedule = job_obj.get_job_schedule()
+ assert schedule
+ assert 'job_minutes' in schedule
+ assert schedule['job_minutes'] == [-1]
+
+
+def test_if_all_methods_catch_exception_zapi():
+ ''' test error zapi - get/create/modify/delete'''
+ register_responses([
+ ('job-schedule-cron-get-iter', ZRR['error']),
+ ('job-schedule-cron-create', ZRR['error']),
+ ('job-schedule-cron-modify', ZRR['error']),
+ ('job-schedule-cron-destroy', ZRR['error'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS)
+
+ assert 'Error fetching job schedule' in expect_and_capture_ansible_exception(job_obj.get_job_schedule, 'fail')['msg']
+ assert 'Error creating job schedule' in expect_and_capture_ansible_exception(job_obj.create_job_schedule, 'fail')['msg']
+ assert 'Error modifying job schedule' in expect_and_capture_ansible_exception(job_obj.modify_job_schedule, 'fail', {}, {})['msg']
+ assert 'Error deleting job schedule' in expect_and_capture_ansible_exception(job_obj.delete_job_schedule, 'fail')['msg']
+
+
+SRR = rest_responses({
+ 'get_schedule': (200, {"records": [
+ {
+ "uuid": "010df156-e0a9-11e9-9f70-005056b3df08",
+ "name": "test_job",
+ "cron": {
+ "minutes": [25],
+ "hours": [0],
+ "weekdays": [0],
+ "months": [5, 6]
+ }
+ }
+ ], "num_records": 1}, None),
+ 'get_all_minutes': (200, {"records": [
+ {
+ "uuid": "010df156-e0a9-11e9-9f70-005056b3df08",
+ "name": "test_job",
+ "cron": {
+ "minutes": range(60),
+ "hours": [0],
+ "weekdays": [0],
+ "months": [5, 6]
+ }
+ }
+ ], "num_records": 1}, None)
+})
+
+
+DEFAULT_ARGS_REST = {
+ 'name': 'test_job',
+ 'job_minutes': [25],
+ 'job_hours': [0],
+ 'job_days_of_week': [0],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always'
+}
+
+
+def test_rest_successful_create():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['zero_records']),
+ ('POST', 'cluster/schedules', SRR['success']),
+ ])
+ assert create_and_apply(job_module, DEFAULT_ARGS_REST)['changed']
+
+
+def test_rest_create_idempotency():
+ '''Test rest create idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['get_schedule'])
+ ])
+ assert not create_and_apply(job_module, DEFAULT_ARGS_REST)['changed']
+
+
+def test_rest_get_0_offset():
+ '''Test rest get using month offset'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['get_schedule'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS_REST, {'month_offset': 0})
+ record = job_obj.get_job_schedule_rest()
+ assert record
+ assert record['job_months'] == [x - 1 for x in SRR['get_schedule'][1]['records'][0]['cron']['months']]
+
+
+def test_rest_get_1_offset():
+ '''Test rest get using month offset'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['get_schedule'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS_REST, {'month_offset': 1})
+ record = job_obj.get_job_schedule_rest()
+ assert record
+ assert record['job_months'] == SRR['get_schedule'][1]['records'][0]['cron']['months']
+
+
+def test_rest_create_all_minutes():
+    '''Test rest create with job_minutes set to [-1] (all minutes)'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['zero_records']),
+ ('POST', 'cluster/schedules', SRR['success'])
+ ])
+ assert create_and_apply(job_module, DEFAULT_ARGS_REST, {'job_minutes': [-1]})['changed']
+
+
+def test_rest_create_0_offset():
+ '''Test rest create using month offset'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['zero_records']),
+ ('POST', 'cluster/schedules', SRR['success'])
+ ])
+ args = {'month_offset': 0, 'job_months': [0, 8]}
+ assert create_and_apply(job_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_rest_create_1_offset():
+ '''Test rest create using month offset'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['zero_records']),
+ ('POST', 'cluster/schedules', SRR['success'])
+ ])
+ args = {'month_offset': 1, 'job_months': [1, 9]}
+ assert create_and_apply(job_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_rest_modify_0_offset():
+ '''Test rest modify using month offset'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['get_schedule']),
+ ('PATCH', 'cluster/schedules/010df156-e0a9-11e9-9f70-005056b3df08', SRR['success'])
+ ])
+ args = {'month_offset': 0, 'job_months': [0, 8]}
+ assert create_and_apply(job_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_rest_modify_1_offset():
+ '''Test rest modify using month offset'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['get_schedule']),
+ ('PATCH', 'cluster/schedules/010df156-e0a9-11e9-9f70-005056b3df08', SRR['success'])
+ ])
+ args = {'month_offset': 1, 'job_months': [1, 9], 'cluster': 'cluster1'}
+ assert create_and_apply(job_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_negative_month_of_0():
+    '''Test that a job_months value of 0 is rejected when month_offset is 1'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ args = {'month_offset': 1, 'job_months': [0, 9]}
+ error = 'Error: 0 is not a valid value in months if month_offset is set to 1'
+ assert error in create_module(job_module, DEFAULT_ARGS_REST, args, fail=True)['msg']
+
+
+def test_rest_get_all_minutes():
+    '''Test rest get reports job_minutes as [-1] when all 60 minutes are set'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['get_all_minutes'])
+ ])
+ args = {'month_offset': 1, 'job_months': [1, 9]}
+ job_obj = create_module(job_module, DEFAULT_ARGS_REST, args)
+ schedule = job_obj.get_job_schedule()
+ assert schedule
+ assert 'job_minutes' in schedule
+ assert schedule['job_minutes'] == [-1]
+
+
+def test_if_all_methods_catch_exception_rest():
+    ''' test error rest - get/create/modify/delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/schedules', SRR['generic_error']),
+ ('POST', 'cluster/schedules', SRR['generic_error']),
+ ('PATCH', 'cluster/schedules/abcd', SRR['generic_error']),
+ ('DELETE', 'cluster/schedules/abcd', SRR['generic_error'])
+ ])
+ job_obj = create_module(job_module, DEFAULT_ARGS_REST)
+ job_obj.uuid = 'abcd'
+ assert 'Error fetching job schedule' in expect_and_capture_ansible_exception(job_obj.get_job_schedule, 'fail')['msg']
+ assert 'Error creating job schedule' in expect_and_capture_ansible_exception(job_obj.create_job_schedule, 'fail')['msg']
+ assert 'Error modifying job schedule' in expect_and_capture_ansible_exception(job_obj.modify_job_schedule, 'fail', {}, {})['msg']
+ assert 'Error deleting job schedule' in expect_and_capture_ansible_exception(job_obj.delete_job_schedule, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_kerberos_interface.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_kerberos_interface.py
new file mode 100644
index 000000000..ada9b4328
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_kerberos_interface.py
@@ -0,0 +1,107 @@
+# Copyright: NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, \
+ create_and_apply, create_module, expect_and_capture_ansible_exception, call_main, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, \
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_kerberos_interface \
+ import NetAppOntapKerberosInterface as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always',
+ 'enabled': False,
+ 'interface_name': 'lif1',
+ 'vserver': 'ansibleSVM'
+}
+
+
+SRR = rest_responses({
+ 'kerberos_int_conf_enabled': (200, {"records": [{
+ "spn": "nfs/life2@RELAM2",
+ "machine_account": "account1",
+ "interface": {
+ "ip": {"address": "10.10.10.7"},
+ "name": "lif1",
+ "uuid": "1cd8a442"
+ },
+ "enabled": True,
+ }], "num_records": 1}, None),
+ 'kerberos_int_conf_disabled': (200, {"records": [{
+ "interface": {
+ "ip": {"address": "10.10.10.7"},
+ "name": "lif1",
+ "uuid": "1cd8a442"
+ },
+ "enabled": False,
+ }], "num_records": 1}, None),
+})
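+
+# The interface uuid ('1cd8a442') returned in the GET record is reused by the
+# module in the PATCH URL when enabling or disabling Kerberos on the LIF.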
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_enable_kerberos_int_conf():
+ ''' enable kerberos int conf '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/nfs/kerberos/interfaces', SRR['kerberos_int_conf_disabled']),
+ ('PATCH', 'protocols/nfs/kerberos/interfaces/1cd8a442', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/nfs/kerberos/interfaces', SRR['kerberos_int_conf_enabled'])
+ ])
+ args = {
+ "spn": "nfs/life2@RELAM2",
+ "machine_account": "account1",
+ "admin_username": "user1",
+ "admin_password": "pass1",
+ "enabled": True
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_all_methods_catch_exception():
+    ''' test exception in get/modify '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # GET/PATCH error.
+ ('GET', 'protocols/nfs/kerberos/interfaces', SRR['generic_error']),
+ ('PATCH', 'protocols/nfs/kerberos/interfaces/1cd8a442', SRR['generic_error'])
+ ])
+ ker_obj = create_module(my_module, DEFAULT_ARGS)
+ ker_obj.uuid = '1cd8a442'
+ assert 'Error fetching kerberos interface' in expect_and_capture_ansible_exception(ker_obj.get_kerberos_interface, 'fail')['msg']
+ assert 'Error modifying kerberos interface' in expect_and_capture_ansible_exception(ker_obj.modify_kerberos_interface, 'fail')['msg']
+
+
+def test_error_ontap97():
+ ''' test module supported from 9.7 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ assert 'requires ONTAP 9.7.0 or later' in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_kerberos_realm.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_kerberos_realm.py
new file mode 100644
index 000000000..30f577d4c
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_kerberos_realm.py
@@ -0,0 +1,213 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP Kerberos Realm module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import pytest
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_kerberos_realm \
+ import NetAppOntapKerberosRealm as my_module # module under test
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+    pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'https': True,
+ 'validate_certs': False,
+ 'use_rest': 'never',
+ 'realm': 'NETAPP.COM',
+ 'vserver': 'vserver1',
+ 'kdc_ip': '192.168.0.1',
+ 'kdc_vendor': 'other'
+}
+
+kerberos_info = {
+ 'num-records': "1",
+ 'attributes-list': {
+ 'kerberos-realm': {
+ 'admin-server-ip': "192.168.0.1",
+ 'admin-server-port': "749",
+ 'clock-skew': "5",
+ 'kdc-ip': "192.168.0.1",
+ 'kdc-port': "88",
+ 'kdc-vendor': "other",
+ 'password-server-ip': "192.168.0.1",
+ 'password-server-port': "464",
+ "permitted-enc-types": {
+ "string": ["des", "des3", "aes_128", "aes_256"]
+ },
+ 'realm': "NETAPP.COM",
+ 'vserver-name': "vserver1"
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'kerberos_info': build_zapi_response(kerberos_info)
+})
+
+
+SRR = rest_responses({
+ 'kerberos_info': (200, {"records": [{
+ "svm": {
+ "uuid": "89368b07",
+ "name": "svm3"
+ },
+ "name": "name1",
+ "kdc": {
+ "vendor": "microsoft",
+ "ip": "10.193.115.116",
+ "port": 88
+ },
+ "comment": "mohan",
+ "ad_server": {
+ "name": "netapp",
+ "address": "10.193.115.116"
+ }
+ }], "num_records": 1}, None)
+})
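+
+# With REST, realms are addressed by '<svm_uuid>/<realm>' (here
+# 'protocols/nfs/kerberos/realms/89368b07/NETAPP.COM'), which is why the error
+# tests set kerb_obj.svm_uuid before exercising modify and delete.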
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "realm", "vserver"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_module_fail_when_state_present_required_args_missing():
+ ''' required arguments are reported as errors '''
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['kdc_ip']
+ del DEFAULT_ARGS_COPY['kdc_vendor']
+ error = "state is present but all of the following are missing: kdc_vendor, kdc_ip"
+ assert error in create_module(my_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+
+
+def test_get_existing_realm():
+ ''' Test if get_krbrealm returns details for existing kerberos realm '''
+ register_responses([
+ ('kerberos-realm-get-iter', ZRR['kerberos_info'])
+ ])
+ kerb_obj = create_module(my_module, DEFAULT_ARGS)
+ assert kerb_obj.get_krbrealm()
+
+
+def test_successfully_modify_realm():
+    ''' Test successful modify of kdc_ip for an existing realm '''
+ register_responses([
+ ('kerberos-realm-get-iter', ZRR['kerberos_info']),
+ ('kerberos-realm-modify', ZRR['success'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'kdc_ip': '10.1.1.20'})
+
+
+def test_successfully_delete_realm():
+ ''' Test successfully delete realm '''
+ register_responses([
+ ('kerberos-realm-get-iter', ZRR['kerberos_info']),
+ ('kerberos-realm-delete', ZRR['success'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})
+
+
+def test_successfully_create_realm():
+ ''' Test successfully create realm '''
+ register_responses([
+ ('kerberos-realm-get-iter', ZRR['no_records']),
+ ('kerberos-realm-create', ZRR['success'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)
+
+
+def test_required_if():
+ ''' required arguments are reported as errors '''
+ error = "kdc_vendor is microsoft but all of the following are missing: ad_server_ip, ad_server_name"
+ assert error in create_module(my_module, DEFAULT_ARGS, {'kdc_vendor': 'microsoft'}, fail=True)['msg']
+
+ error = "kdc_vendor is microsoft but all of the following are missing: ad_server_name"
+ args = {'kdc_vendor': 'microsoft', 'ad_server_ip': '10.0.0.1'}
+ assert error in create_module(my_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('kerberos-realm-get-iter', ZRR['error']),
+ ('kerberos-realm-create', ZRR['error']),
+ ('kerberos-realm-modify', ZRR['error']),
+ ('kerberos-realm-delete', ZRR['error']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/nfs/kerberos/realms', SRR['generic_error']),
+ ('POST', 'protocols/nfs/kerberos/realms', SRR['generic_error']),
+ ('PATCH', 'protocols/nfs/kerberos/realms/89368b07/NETAPP.COM', SRR['generic_error']),
+ ('DELETE', 'protocols/nfs/kerberos/realms/89368b07/NETAPP.COM', SRR['generic_error'])
+ ])
+ kerb_obj = create_module(my_module, DEFAULT_ARGS)
+ assert 'Error fetching kerberos realm' in expect_and_capture_ansible_exception(kerb_obj.get_krbrealm, 'fail')['msg']
+ assert 'Error creating Kerberos Realm' in expect_and_capture_ansible_exception(kerb_obj.create_krbrealm, 'fail')['msg']
+ assert 'Error modifying Kerberos Realm' in expect_and_capture_ansible_exception(kerb_obj.modify_krbrealm, 'fail', {})['msg']
+ assert 'Error deleting Kerberos Realm' in expect_and_capture_ansible_exception(kerb_obj.delete_krbrealm, 'fail')['msg']
+
+ kerb_obj = create_module(my_module, DEFAULT_ARGS, {'use_rest': 'always'})
+ kerb_obj.svm_uuid = '89368b07'
+ assert 'Error fetching kerberos realm' in expect_and_capture_ansible_exception(kerb_obj.get_krbrealm, 'fail')['msg']
+ assert 'Error creating Kerberos Realm' in expect_and_capture_ansible_exception(kerb_obj.create_krbrealm, 'fail')['msg']
+ assert 'Error modifying Kerberos Realm' in expect_and_capture_ansible_exception(kerb_obj.modify_krbrealm, 'fail', {})['msg']
+ assert 'Error deleting Kerberos Realm' in expect_and_capture_ansible_exception(kerb_obj.delete_krbrealm, 'fail')['msg']
+
+
+def test_successfully_create_realm_rest():
+ ''' Test successfully create realm '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/nfs/kerberos/realms', SRR['empty_records']),
+ ('POST', 'protocols/nfs/kerberos/realms', SRR['success']),
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'use_rest': 'always'})
+
+
+def test_successfully_modify_realm_rest():
+    ''' Test successful REST modify of kdc_ip and kdc_port '''
+ register_responses([
+ # modify ip.
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/nfs/kerberos/realms', SRR['kerberos_info']),
+ ('PATCH', 'protocols/nfs/kerberos/realms/89368b07/NETAPP.COM', SRR['success']),
+ # modify port.
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/nfs/kerberos/realms', SRR['kerberos_info']),
+ ('PATCH', 'protocols/nfs/kerberos/realms/89368b07/NETAPP.COM', SRR['success']),
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'use_rest': 'always', 'kdc_ip': '10.1.1.20'})
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'use_rest': 'always', 'kdc_port': '8088'})
+
+
+def test_successfully_delete_realm_rest():
+ ''' Test successfully delete realm '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'protocols/nfs/kerberos/realms', SRR['kerberos_info']),
+ ('DELETE', 'protocols/nfs/kerberos/realms/89368b07/NETAPP.COM', SRR['success'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'use_rest': 'always', 'state': 'absent'})
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ldap_client.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ldap_client.py
new file mode 100644
index 000000000..4df8d9fee
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ldap_client.py
@@ -0,0 +1,481 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_ldap_client '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, call_main, create_module, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ldap_client \
+ import NetAppOntapLDAPClient as client_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'ldap_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "vserver"
+ },
+ "servers": ['10.193.115.116'],
+ "schema": 'RFC-2307',
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None),
+ "svm": (
+ 200,
+ {"records": [{"uuid": "671aa46e"}]},
+ None)
+})
+
+
+ldap_client_info = {
+    'num-records': 1,
+    'attributes-list': {
+        'ldap-client': {
+            'ldap-client-config': 'test_ldap',
+            'schema': 'RFC-2307',
+            'ldap-servers': [{"ldap-server": '10.193.115.116'}]
+        }
+    }
+}
+
+ZRR = zapi_responses({
+ 'ldap_client_info': build_zapi_response(ldap_client_info)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'name': 'test_ldap',
+ 'schema': 'RFC-2307',
+ 'use_rest': 'never',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ client_module()
+    print('Info: %s' % exc.value.args[0]['msg'])
+    assert 'missing required arguments:' in exc.value.args[0]['msg']
+
+
+def test_get_nonexistent_client():
+    ''' Test if get_ldap_client returns None for a non-existent LDAP client config '''
+ register_responses([
+ ('ldap-client-get-iter', ZRR['empty'])
+ ])
+ ldap_obj = create_module(client_module, DEFAULT_ARGS)
+ result = ldap_obj.get_ldap_client()
+ assert result is None
+
+
+def test_error_name_required_zapi():
+ ''' name is required with ZAPI '''
+ error = 'Error: name is a required field with ZAPI.'
+ assert error in create_module(client_module, DEFAULT_ARGS, {'name': None}, fail=True)['msg']
+
+
+def test_get_existing_client():
+    ''' Test if get_ldap_client returns details for an existing LDAP client config '''
+ register_responses([
+ ('ldap-client-get-iter', ZRR['ldap_client_info'])
+ ])
+ ldap_obj = create_module(client_module, DEFAULT_ARGS)
+ result = ldap_obj.get_ldap_client()
+ assert result
+
+
+def test_successfully_create_zapi():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['empty']),
+ ('ldap-client-create', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'ldap_servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_create_zapi():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['empty']),
+ ('ldap-client-create', ZRR['error']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'ldap_servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307'
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error creating LDAP client"
+ assert msg in error
+
+
+def test_error_create_ad_zapi():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['empty']),
+ ('ldap-client-create', ZRR['error']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'ad_domain': 'ad.netapp.com',
+ 'preferred_ad_servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307'
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error creating LDAP client"
+ assert msg in error
+
+
+def test_create_idempotency():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['ldap_client_info']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307',
+ 'state': 'present'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_delete():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['ldap_client_info']),
+ ('ldap-client-delete', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'ldap_servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_delete_zapi():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['ldap_client_info']),
+ ('ldap-client-delete', ZRR['error']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'ldap_servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307',
+ 'state': 'absent'
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error deleting LDAP client configuration"
+ assert msg in error
+
+
+def test_delete_idempotency():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['empty']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ldap_servers():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['ldap_client_info']),
+ ('ldap-client-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'ldap_servers': ['10.195.64.121'],
+ 'schema': 'RFC-2307',
+ 'ldaps_enabled': True,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ldap_ad_servers():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['ldap_client_info']),
+ ('ldap-client-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'ad_domain': 'ad.netapp.com',
+ 'preferred_ad_servers': ['10.195.64.121'],
+ 'schema': 'RFC-2307',
+ 'ldaps_enabled': True,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ldap_schema_zapi():
+ register_responses([
+ ('ldap-client-get-iter', ZRR['ldap_client_info']),
+ ('ldap-client-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'test_ldap',
+ 'ldap_servers': ['10.195.64.121'],
+ 'schema': 'MS-AD-BIS',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ldap-client-create', ZRR['error']),
+ ('ldap-client-delete', ZRR['error']),
+ ('ldap-client-modify', ZRR['error'])
+ ])
+ module_args = {'name': 'test_ldap'}
+ my_obj = create_module(client_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.create_ldap_client, 'fail')['msg']
+ assert 'Error creating LDAP client test_ldap: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_ldap_client, 'fail')['msg']
+ assert 'Error deleting LDAP client configuration test_ldap: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.modify_ldap_client, 'fail', 'ldap-client-modify')['msg']
+ assert 'Error modifying LDAP client test_ldap: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+ 'servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307',
+}
+
+
+def test_get_nonexistent_ldap_config_rest():
+ ''' Test if get_ldap_client_rest returns None for a non-existent LDAP config '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['empty_records']),
+ ])
+ ldap_obj = create_module(client_module, ARGS_REST)
+ result = ldap_obj.get_ldap_client_rest()
+ assert result is None
+
+
+def test_get_existent_ldap_config_rest():
+ ''' Test if get_ldap_client_rest returns the existing LDAP config '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['ldap_record']),
+ ])
+ ldap_obj = create_module(client_module, ARGS_REST)
+ result = ldap_obj.get_ldap_client_rest()
+ assert result
+
+
+def test_get_error_ldap_config_rest():
+ ''' Test error while getting LDAP config '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['generic_error']),
+ ])
+ error = call_main(my_main, ARGS_REST, fail=True)['msg']
+ msg = "Error on getting idap client info:"
+ assert msg in error
+
+
+def test_create_ldap_client_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['empty_records']),
+ ('GET', 'svm/svms', SRR['svm']),
+ ('POST', 'name-services/ldap', SRR['empty_good']),
+ ])
+ module_args = {
+ 'ldap_servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_create_ldap_client_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['empty_records']),
+ ('GET', 'svm/svms', SRR['svm']),
+ ('POST', 'name-services/ldap', SRR['generic_error']),
+ ])
+ module_args = {
+ 'servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307'
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on creating ldap client:"
+ assert msg in error
+
+
+def test_delete_ldap_client_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['ldap_record']),
+ ('DELETE', 'name-services/ldap/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ 'servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_delete_ldap_client_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['ldap_record']),
+ ('DELETE', 'name-services/ldap/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error']),
+ ])
+ module_args = {
+ 'servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307',
+ 'state': 'absent'
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on deleting ldap client rest:"
+ assert msg in error
+
+
+def test_create_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['ldap_record']),
+ ])
+ module_args = {
+ 'state': 'present',
+ 'servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307',
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_on_cluster_vserver():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['empty_records']),
+ ('GET', 'svm/svms', SRR['empty_records']),
+ ])
+ module_args = {
+ 'state': 'present',
+ 'servers': ['10.193.115.116'],
+ 'schema': 'RFC-2307',
+ }
+ assert 'is not a data vserver.' in call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+
+
+def test_delete_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['empty_records'])
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_modify_schema_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['ldap_record']),
+ ('PATCH', 'name-services/ldap/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good'])
+ ])
+ module_args = {
+ 'state': 'present',
+ 'servers': ['10.193.115.116'],
+ 'schema': 'AD-IDMU',
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_modify_ldap_servers_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['ldap_record']),
+ ('PATCH', 'name-services/ldap/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good'])
+ ])
+ module_args = {
+ 'state': 'present',
+ 'servers': ['10.195.64.121'],
+ 'schema': 'AD-IDMU',
+ 'ldaps_enabled': True,
+ 'skip_config_validation': True
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_negative_modify_ldap_servers_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['ldap_record']),
+ ('PATCH', 'name-services/ldap/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error'])
+ ])
+ module_args = {
+ 'state': 'present',
+ 'servers': ['10.195.64.121'],
+ 'schema': 'AD-IDMU',
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on modifying ldap client config:"
+ assert msg in error
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_error_no_server():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/ldap', SRR['ldap_record']),
+ ])
+ args = dict(ARGS_REST)
+ args.pop('servers')
+ error = 'Required one of servers or ad_domain'
+ assert error in call_main(my_main, args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_license.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_license.py
new file mode 100644
index 000000000..1683d2577
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_license.py
@@ -0,0 +1,432 @@
+# (c) 2022-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP license Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import sys
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_no_warnings, call_main, create_module, expect_and_capture_ansible_exception, patch_ansible, print_warnings
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, build_zapi_error, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_license import NetAppOntapLicense as my_module, main as my_main, HAS_DEEPDIFF
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def license_status(fcp_method):
+ return {
+ 'license-v2-status': [
+ {'license-v2-status-info':
+ {
+ 'package': 'base',
+ 'method': 'site'
+ }},
+ {'license-v2-status-info':
+ {
+ 'package': 'capacitypool',
+ 'method': 'none'
+ }},
+ {'license-v2-status-info':
+ {
+ 'package': 'cifs',
+ 'method': 'site'
+ }},
+ {'license-v2-status-info':
+ {
+ 'package': 'fcp',
+ 'method': fcp_method
+ }},
+ ]
+ }
+
+
+ZRR = zapi_responses({
+ 'license_status_fcp_none': build_zapi_response(license_status('none')),
+ 'license_status_fcp_site': build_zapi_response(license_status('site')),
+ 'error_object_not_found': build_zapi_error('15661', 'license is not active')
+})
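+# ZAPI error 15661 ('license is not active') is used by the idempotency tests below:
+# the module treats it as 'license already absent' when deleting.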
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+}
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ module_args = {
+ "use_rest": "never"
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_module_add_license_zapi():
+ ''' Test add license '''
+ register_responses([
+ ('ZAPI', 'license-v2-status-list-info', ZRR['license_status_fcp_none']),
+ ('ZAPI', 'license-v2-add', ZRR['success']),
+ ('ZAPI', 'license-v2-status-list-info', ZRR['license_status_fcp_site']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'license_codes': 'LICENSECODE',
+ }
+ print('ZRR', build_zapi_response(license_status('site'))[0].to_string())
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_module_add_license_idempotent_zapi():
+ ''' Test add license idempotent '''
+ register_responses([
+ ('ZAPI', 'license-v2-status-list-info', ZRR['license_status_fcp_site']),
+ ('ZAPI', 'license-v2-add', ZRR['success']),
+ ('ZAPI', 'license-v2-status-list-info', ZRR['license_status_fcp_site']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'license_codes': 'LICENSECODE',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_module_remove_license_zapi():
+ ''' Test remove license '''
+ register_responses([
+ ('ZAPI', 'license-v2-status-list-info', ZRR['license_status_fcp_site']),
+ ('ZAPI', 'license-v2-delete', ZRR['success']),
+ ('ZAPI', 'license-v2-delete', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'serial_number': '1-8-000000',
+ 'license_names': 'cifs,fcp',
+ 'state': 'absent',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_module_remove_license_idempotent_zapi():
+ ''' Test remove license idempotent '''
+ register_responses([
+ ('ZAPI', 'license-v2-status-list-info', ZRR['license_status_fcp_site']),
+ ('ZAPI', 'license-v2-delete', ZRR['error_object_not_found']),
+ ('ZAPI', 'license-v2-delete', ZRR['error_object_not_found']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'serial_number': '1-8-000000',
+ 'license_names': 'cifs,fcp',
+ 'state': 'absent',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_module_remove_unused_expired_zapi():
+ ''' Test remove unused expired license '''
+ register_responses([
+ ('ZAPI', 'license-v2-status-list-info', ZRR['license_status_fcp_site']),
+ ('ZAPI', 'license-v2-delete-unused', ZRR['success']),
+ ('ZAPI', 'license-v2-delete-expired', ZRR['success']),
+ ('ZAPI', 'license-v2-status-list-info', ZRR['license_status_fcp_none']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'remove_unused': True,
+ 'remove_expired': True,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_module_try_to_remove_non_existent_package_license_zapi():
+ ''' Try to remove a non-existent license '''
+ register_responses([
+ ('ZAPI', 'license-v2-delete', ZRR['error_object_not_found']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'serial_number': '1-8-000000',
+ 'license_names': 'cifs',
+ 'state': 'absent',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ license_exist = my_obj.remove_licenses('cifs')
+ assert not license_exist
+
+
+def test_module_error_add_license_zapi():
+ ''' Test error add license '''
+ register_responses([
+ ('ZAPI', 'license-v2-add', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'license_codes': 'random',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'Error adding licenses' in expect_and_capture_ansible_exception(my_obj.add_licenses, 'fail')['msg']
+
+
+def test_module_error_remove_license_zapi():
+ ''' Test error remove license '''
+ register_responses([
+ ('ZAPI', 'license-v2-delete', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'serial_number': '1-8-000000',
+ 'license_names': 'random',
+ 'state': 'absent',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'Error removing license' in expect_and_capture_ansible_exception(my_obj.remove_licenses, 'fail', 'random')['msg']
+
+
+def test_module_error_get_and_remove_unused_expired_license_zapi():
+ ''' Test error get and remove unused/expired license '''
+ register_responses([
+ ('ZAPI', 'license-v2-status-list-info', ZRR['error']),
+ ('ZAPI', 'license-v2-delete-unused', ZRR['error']),
+ ('ZAPI', 'license-v2-delete-expired', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'Error checking license status' in expect_and_capture_ansible_exception(my_obj.get_licensing_status, 'fail')['msg']
+ assert 'Error removing unused licenses' in expect_and_capture_ansible_exception(my_obj.remove_unused_licenses, 'fail')['msg']
+ assert 'Error removing expired licenses' in expect_and_capture_ansible_exception(my_obj.remove_expired_licenses, 'fail')['msg']
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'error_entry_does_not_exist': (404, None, "entry doesn't exist"),
+ 'license_record': (200, {
+ "num_records": 3,
+ "records": [
+ {
+ "name": "base",
+ "scope": "cluster",
+ "state": "compliant"
+ },
+ {
+ "name": "nfs",
+ "scope": "not_available",
+ "state": "unlicensed"
+ },
+ {
+ "name": "cifs",
+ "scope": "site",
+ "state": "compliant"
+ }]
+ }, None),
+ 'license_record_nfs': (200, {
+ "num_records": 3,
+ "records": [
+ {
+ "name": "base",
+ "scope": "cluster",
+ "state": "compliant"
+ },
+ {
+ "name": "nfs",
+ "scope": "site",
+ "state": "compliant"
+ },
+ {
+ "name": "cifs",
+ "scope": "site",
+ "state": "compliant"
+ }]
+ }, None),
+ 'license_record_no_nfs': (200, {
+ "num_records": 3,
+ "records": [
+ {
+ "name": "base",
+ "scope": "cluster",
+ "state": "compliant"
+ },
+ {
+ "name": "cifs",
+ "scope": "site",
+ "state": "compliant"
+ }]
+ }, None)
+}, False)
+
+
+def test_module_fail_when_unsupported_rest_present():
+ ''' error if unsupported rest properties present '''
+ register_responses([
+ ])
+ module_args = {
+ 'remove_unused': True,
+ 'remove_expired': True,
+ 'use_rest': 'always'
+ }
+ error = 'REST API currently does not support'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_ensure_get_license_status_called_rest():
+ ''' test get license status '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_no_warnings()
+
+
+def test_module_error_get_license_rest():
+ ''' test error getting license information '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always'
+ }
+ error = rest_error_message('', 'cluster/licensing/licenses')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_module_add_license_rest():
+ ''' test add license'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']), # get license information
+ ('POST', 'cluster/licensing/licenses', SRR['empty_good']), # Apply license
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']), # get updated license information
+ ])
+ module_args = {
+ 'license_codes': 'LICENCECODE',
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ if HAS_DEEPDIFF:
+ assert_no_warnings()
+
+
+def test_module_error_add_license_rest():
+ ''' test error adding license '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']), # get license information
+ ('POST', 'cluster/licensing/licenses', SRR['generic_error']), # Error in adding license
+ ])
+ module_args = {
+ 'license_codes': 'INVALIDLICENCECODE',
+ 'use_rest': 'always'
+ }
+ error = 'calling: cluster/licensing/licenses: got Expected error.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_module_remove_license():
+ ''' test remove license'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']),
+ ('DELETE', 'cluster/licensing/licenses/nfs', SRR['empty_good']), # remove license
+ ])
+ module_args = {
+ 'license_names': 'nfs',
+ 'serial_number': '1-23-45678',
+ 'state': 'absent',
+ 'use_rest': 'always'
+ }
+ print_warnings()
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ assert_no_warnings()
+
+
+def test_module_error_remove_license_rest():
+ ''' test remove license error'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']), # get license information
+ ('DELETE', 'cluster/licensing/licenses/nfs', SRR['generic_error']), # Error in removing license
+ ])
+ module_args = {
+ 'license_names': 'nfs',
+ 'serial_number': '1-23-45678',
+ 'state': 'absent',
+ 'use_rest': 'always'
+ }
+ error = rest_error_message('Error removing license for serial number 1-23-45678 and nfs', 'cluster/licensing/licenses/nfs')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_module_try_to_remove_license_not_present_rest():
+ ''' test remove license that is not present '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ ('DELETE', 'cluster/licensing/licenses/nfs', SRR['error_entry_does_not_exist']), # license not active.
+
+ ])
+ module_args = {
+ 'license_names': 'nfs',
+ 'serial_number': '1-23-45678',
+ 'state': 'absent',
+ 'use_rest': 'always'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_no_warnings()
+
+
+@patch('time.sleep')
+def test_error_mismatch_in_package_list_rest(dont_sleep):
+ ''' test error on mismatch in license package names '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ # 2nd test
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ # 3rd test
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ])
+ module_args = {
+ 'license_names': 'non-existent-package',
+ 'serial_number': '1-23-45678',
+ 'use_rest': 'always'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ previous_license_status = {'base': 'compliant', 'nfs': 'unlicensed', 'cifs': 'compliant'}
+ assert my_obj.compare_license_status(previous_license_status) == []
+ previous_license_status = {'base': 'compliant', 'nfs': 'unlicensed', 'cifs': 'unlicensed'}
+ assert my_obj.compare_license_status(previous_license_status) == ['cifs']
+ error = "Error: mismatch in license package names: 'nfs'. Expected:"
+ assert error in expect_and_capture_ansible_exception(my_obj.compare_license_status, 'fail', previous_license_status)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_license_nlf.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_license_nlf.py
new file mode 100644
index 000000000..b4128499d
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_license_nlf.py
@@ -0,0 +1,461 @@
+# (c) 2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP license Ansible module - NLF (NetApp License File) support '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import sys
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_no_warnings, assert_warning_was_raised, call_main, create_module, expect_and_capture_ansible_exception, patch_ansible, print_warnings
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_license import NetAppOntapLicense as my_module, main as my_main, HAS_DEEPDIFF
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+}
+
+NLF = """
+{"statusResp":{"statusCode":"SUCCESS","message":"Information sent successfully","filter":"SOA","serialNumber":"12345678","cmatID":"0000000",
+"product":"%s","version":"2","licenses":{"legacyKey":"Generate NetApp License File (NLF)","HostID":"12345678","type":"capacity",
+"package":["CIFS","NFS","S3","FCP","iSCSI","NVMe_oF","FlexClone","SnapRestore","SnapMirror","SnapMirror_Sync","SnapManagerSuite","SnapVault","S3_SnapMirror","VE","TPM"],
+"capacity":"1","evaluation":"false","entitlementLastUpdated":"2023-01-04T07:58:16.000-07:00","licenseScope":"node","licenseProtocol":"ENT_ENCRYPT_ED_CAP_3",
+"enforcementAttributes":[{"name":"DO-Capacity-Warn","metric":"5:1",
+"msg":"You've exceeded your capacity limit. Add capacity to your license to ensure your product use is unaffected.","operatingPolicy":"na"},
+{"name":"DO-Capacity-Enforce","metric":"6:1",
+"msg":"You've exceeded your capacity limit. Add capacity to your license to ensure your product use is unaffected.","operatingPolicy":"ndo"}]}},
+"Signature":"xxxx"}
+""".replace('\n', '')
+
+NLF_EE = NLF % "Enterprise Edition"
+NLF_CB = NLF % "Core Bundle"
+
+NLF_MULTIPLE = "%s\n%s" % (NLF_EE, NLF_CB)
+
+NLF_DICT_NO_PRODUCT = {"statusResp": {"serialNumber": "12345678"}}
+NLF_DICT_NO_SERIAL = {"statusResp": {"product": "Enterprise Edition"}}
+NLF_DICT_PRODUCT_SN = {"statusResp": {"product": "Enterprise Edition", "serialNumber": "12345678"}}
+NLF_DICT_PRODUCT_SN_STAR = {"statusResp": {"product": "Enterprise Edition", "serialNumber": "*"}}
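+# minimal NLF dicts used to exercise the validation helpers below
+# (missing product, missing serial number, both present, wildcard serial number)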
+
+
+def test_module_error_zapi_not_supported():
+ ''' Test that NLF license codes are rejected with ZAPI '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'license_codes': [NLF_EE],
+ }
+ error = 'Error: NLF license format is not supported with ZAPI.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args = {
+ 'use_rest': 'never',
+ 'license_codes': [NLF_EE],
+ 'state': 'absent'
+ }
+ error = 'Error: NLF license format is not supported with ZAPI.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'error_entry_does_not_exist': (404, None, "entry doesn't exist"),
+ 'license_record': (200, {
+ "num_records": 3,
+ "records": [
+ {
+ "name": "base",
+ "scope": "cluster",
+ "state": "compliant"
+ },
+ {
+ "name": "nfs",
+ "scope": "not_available",
+ "state": "unlicensed"
+ },
+ {
+ "name": "cifs",
+ "scope": "site",
+ "state": "compliant"
+ }]
+ }, None),
+ 'license_record_nfs': (200, {
+ "num_records": 3,
+ "records": [
+ {
+ "name": "base",
+ "scope": "cluster",
+ "state": "compliant"
+ },
+ {
+ "name": "nfs",
+ "scope": "site",
+ "state": "compliant",
+ "licenses": [
+ {
+ "installed_license": "Enterprise Edition",
+ "serial_number": "12345678",
+ "maximum_size": 1099511627776
+ }
+
+ ]
+ },
+ {
+ "name": "cifs",
+ "scope": "site",
+ "state": "compliant"
+ }]
+ }, None),
+ 'license_record_no_nfs': (200, {
+ "num_records": 3,
+ "records": [
+ {
+ "name": "base",
+ "scope": "cluster",
+ "state": "compliant"
+ },
+ {
+ "name": "cifs",
+ "scope": "site",
+ "state": "compliant"
+ }]
+ }, None),
+ 'conflict_error': (409, None, 'license with conflicts error message'),
+ 'failed_to_install_error': (400, None,
+ 'Failed to install the license at index 0. The system received a licensing request with an invalid digital signature.'),
+}, False)
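+# 'conflict_error' (409) and 'failed_to_install_error' (400) above exercise the
+# partial-install and invalid-signature error paths when POSTing the license.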
+
+
+def test_module_add_nlf_license_rest():
+ ''' test add NLF license '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']), # get license information
+ ('POST', 'cluster/licensing/licenses', SRR['empty_good']), # Apply license
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']), # get updated license information
+ ])
+ module_args = {
+ 'license_codes': [NLF_EE],
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ if HAS_DEEPDIFF:
+ assert_no_warnings()
+
+
+def test_module_error_add_nlf_license_rest():
+ ''' test errors when adding NLF license '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ ('POST', 'cluster/licensing/licenses', SRR['conflict_error']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']), # get updated license information
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ ('POST', 'cluster/licensing/licenses', SRR['failed_to_install_error']),
+ ])
+ module_args = {
+ 'license_codes': [NLF_EE],
+ 'use_rest': 'always'
+ }
+ error = rest_error_message('Error: some licenses were updated, but others were in conflict', 'cluster/licensing/licenses',
+ got='got license with conflicts error message')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ if HAS_DEEPDIFF:
+ assert_no_warnings()
+ error = rest_error_message('Error adding license', 'cluster/licensing/licenses',
+ got='got Failed to install the license at index 0')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ if HAS_DEEPDIFF:
+ assert_no_warnings()
+
+
+def test_module_remove_nlf_license():
+ ''' test remove license'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']),
+ ('DELETE', 'cluster/licensing/licenses', SRR['empty_good']),
+ ])
+ module_args = {
+ 'license_codes': [NLF_EE],
+ 'state': 'absent',
+ 'use_rest': 'always'
+ }
+ print_warnings()
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ assert_no_warnings()
+
+
+def test_module_remove_nlf_license_by_name():
+ ''' test remove license'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']),
+ ('DELETE', 'cluster/licensing/licenses', SRR['empty_good']),
+ ])
+ module_args = {
+ 'license_names': "Enterprise Edition",
+ 'state': 'absent',
+ 'use_rest': 'always',
+ 'serial_number': '12345678'
+ }
+ print_warnings()
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ assert_no_warnings()
+
+
+def test_module_error_remove_nlf_license_rest():
+ ''' test remove license error'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']),
+ ('DELETE', 'cluster/licensing/licenses', SRR['generic_error']),
+ ])
+ module_args = {
+ 'license_codes': [NLF_EE],
+ 'state': 'absent',
+ 'use_rest': 'always'
+ }
+ error = rest_error_message('Error removing license for serial number 12345678 and Enterprise Edition', 'cluster/licensing/licenses')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_module_try_to_remove_nlf_license_not_present_rest():
+ ''' test remove license'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']),
+ ])
+ module_args = {
+ 'license_codes': [NLF_CB],
+ 'state': 'absent',
+ 'use_rest': 'always'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_no_warnings()
+
+
+@patch('time.sleep')
+def test_compare_license_status(dont_sleep):
+ ''' test compare_license_status helper '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ # 2nd test
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ # deepdiff 1
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']),
+ # deepdiff 2
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_nfs']),
+ # retries
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record']),
+ # Error, no records
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ('GET', 'cluster/licensing/licenses', SRR['license_record_no_nfs']),
+ ])
+ module_args = {
+ 'license_names': 'non-existent-package',
+ 'serial_number': '1-23-45678',
+ 'use_rest': 'always'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ previous_license_status = {'base': 'compliant', 'nfs': 'unlicensed', 'cifs': 'compliant'}
+ assert my_obj.compare_license_status(previous_license_status) == []
+ previous_license_status = {'base': 'compliant', 'nfs': 'compliant', 'cifs': 'compliant'}
+ assert my_obj.compare_license_status(previous_license_status) == ['nfs']
+ previous_license_status = {'base': 'compliant', 'nfs': 'unlicensed', 'cifs': 'compliant'}
+ # deepdiffs
+ my_obj.previous_records = [{'name': 'base', 'scope': 'cluster', 'state': 'compliant'}]
+ assert my_obj.compare_license_status(previous_license_status) == (['nfs', 'cifs'] if HAS_DEEPDIFF else ['nfs'])
+ if HAS_DEEPDIFF:
+ assert_no_warnings()
+ with patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_license.HAS_DEEPDIFF', False):
+ assert my_obj.compare_license_status(previous_license_status) == ['nfs']
+ print_warnings()
+ assert_warning_was_raised('deepdiff is required to identify detailed changes')
+ # retries, success
+ previous_license_status = {'base': 'compliant', 'nfs': 'unlicensed', 'cifs': 'unlicensed'}
+ assert my_obj.compare_license_status(previous_license_status) == (['cifs', 'nfs'] if HAS_DEEPDIFF else ['cifs'])
+ # retries, error
+ error = "Error: mismatch in license package names: 'nfs'. Expected:"
+ assert error in expect_and_capture_ansible_exception(my_obj.compare_license_status, 'fail', previous_license_status)['msg']
+
+
+def test_format_post_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'state': 'absent',
+ 'license_codes': []
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.format_post_error('some_error', {}) == 'some_error'
+ rest_error = 'The system received a licensing request with an invalid digital signature.'
+ error = my_obj.format_post_error(rest_error, {})
+ assert error == rest_error
+ rest_error += ' Failed to install the license at index 0'
+ error = my_obj.format_post_error(rest_error, {'keys': ["'statusResp'"]})
+ assert 'Original NLF contents were modified by Ansible.' in error
+ error = my_obj.format_post_error(rest_error, {'keys': ["'whatever'"]})
+ assert 'Original NLF contents were modified by Ansible.' not in error
+
+
+def test_nlf_is_installed():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'state': 'absent',
+ 'license_codes': []
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert not my_obj.nlf_is_installed(NLF_DICT_NO_PRODUCT)
+ assert not my_obj.nlf_is_installed(NLF_DICT_NO_SERIAL)
+ my_obj.license_status = {}
+ assert not my_obj.nlf_is_installed(NLF_DICT_PRODUCT_SN)
+ my_obj.license_status['installed_licenses'] = []
+ assert my_obj.nlf_is_installed(NLF_DICT_PRODUCT_SN_STAR)
+
+
+def test_validate_delete_action():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = 'Error: product not found in NLF file'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_delete_action, 'fail', NLF_DICT_NO_PRODUCT)['msg']
+ error = 'Error: serialNumber not found in NLF file'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_delete_action, 'fail', NLF_DICT_NO_SERIAL)['msg']
+ my_obj.parameters['serial_number'] = 'otherSN'
+ error = 'Error: mismatch is serial numbers otherSN vs 12345678'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_delete_action, 'fail', NLF_DICT_PRODUCT_SN)['msg']
+
+
+def test_scan_license_codes_for_nlf():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ nlf = NLF_EE.replace("'", "\\'")
+ nlf = nlf.replace('"', "'")
+ license_code, nlf_dict, is_nlf = my_obj.scan_license_codes_for_nlf(nlf)
+ assert len(nlf_dict) == 2
+ assert len(nlf_dict['statusResp']) == 8
+
+ with patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_license.HAS_AST', False):
+ error = 'Error: ast and json packages are required to install NLF license files.'
+ assert error in expect_and_capture_ansible_exception(my_obj.scan_license_codes_for_nlf, 'fail', nlf)['msg']
+
+ with patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_license.HAS_JSON', False):
+ error = 'Error: ast and json packages are required to install NLF license files.'
+ assert error in expect_and_capture_ansible_exception(my_obj.scan_license_codes_for_nlf, 'fail', nlf)['msg']
+
+ with patch('json.dumps') as json_dumps:
+ json_dumps.side_effect = Exception('exception for test')
+ error = 'Error: unable to encode input:'
+ assert error in expect_and_capture_ansible_exception(my_obj.scan_license_codes_for_nlf, 'fail', nlf)['msg']
+
+ with patch('json.loads') as json_loads:
+ json_loads.side_effect = Exception('exception for test')
+ error = 'Error: the license contents cannot be read. Unable to decode input:'
+ assert error in expect_and_capture_ansible_exception(my_obj.scan_license_codes_for_nlf, 'fail', nlf)['msg']
+
+ nlf = "'statusResp':"
+ # older versions of python report unexpected EOF while parsing
+ # but python 3.10.2 reports exception: invalid syntax (<unknown>, line 1)
+ error = "Error: malformed input: 'statusResp':, exception:"
+ assert error in expect_and_capture_ansible_exception(my_obj.scan_license_codes_for_nlf, 'fail', nlf)['msg']
+
+ nlf = '"statusResp":' * 2
+ error = "Error: NLF license files with multiple licenses are not supported, found 2 in"
+ assert error in expect_and_capture_ansible_exception(my_obj.scan_license_codes_for_nlf, 'fail', nlf)['msg']
+ nlf = '"statusResp":' + ('"serialNumber":' * 2)
+ error = "Error: NLF license files with multiple serial numbers are not supported, found 2 in"
+ assert error in expect_and_capture_ansible_exception(my_obj.scan_license_codes_for_nlf, 'fail', nlf)['msg']
+ nlf = '"statusResp":'
+ my_obj.scan_license_codes_for_nlf(nlf)
+ print_warnings()
+ assert_warning_was_raised('The license will be installed without checking for idempotency.', partial_match=True)
+ assert_warning_was_raised('Unable to decode input', partial_match=True)
+ with patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_license.HAS_JSON', False):
+ my_obj.scan_license_codes_for_nlf(nlf)
+ print_warnings()
+ assert_warning_was_raised('The license will be installed without checking for idempotency.', partial_match=True)
+ assert_warning_was_raised('the json package is required to process NLF license files', partial_match=True)
+
+
+def test_error_nlf_and_legacy():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'license_codes': [NLF, 'xxxxxxxxxxxxxxxx']
+ }
+ error = 'Error: cannot mix legacy licenses and NLF licenses; found 1 NLF licenses out of 2 license_codes.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_split_nlfs():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'license_codes': [NLF_MULTIPLE]
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert len(my_obj.parameters['license_codes']) == 2
+ # force error:
+ error = 'Error: unexpected format found 2 entries and 3 lines'
+ assert error in expect_and_capture_ansible_exception(my_obj.split_nlf, 'fail', '%s\nyyyyy' % NLF_MULTIPLE)['msg']
+
+
+def test_remove_licenses_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'license_codes': [NLF_MULTIPLE]
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = 'Error: serial_number is required to delete a license.'
+ assert error in expect_and_capture_ansible_exception(my_obj.remove_licenses_rest, 'fail', 'bundle name', {})['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_local_hosts.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_local_hosts.py
new file mode 100644
index 000000000..15de03a8b
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_local_hosts.py
@@ -0,0 +1,178 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_local_hosts \
+ import NetAppOntapLocalHosts as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'host_record': (200, {
+ "records": [
+ {
+ "owner": {"name": "svm", "uuid": "e3cb5c7fcd20"},
+ "address": "10.10.10.10",
+ "host": "example.com",
+ "aliases": ["ex1.com", "ex2.com"]
+ }],
+ "num_records": 1
+ }, None),
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'address': '10.10.10.10',
+ 'owner': 'svm',
+}
+
+
+def test_get_local_host_rest_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['empty_records'])
+ ])
+ module_args = {'address': '10.10.10.10', 'owner': 'svm'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_local_host_rest() is None
+
+
+def test_get_local_host_rest_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['generic_error'])
+ ])
+ module_args = {'address': '10.10.10.10', 'owner': 'svm'}
+ my_module_object = create_module(my_module, DEFAULT_ARGS, module_args)
+ msg = 'Error fetching IP to hostname mappings for svm: calling: name-services/local-hosts: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_local_host_rest, 'fail')['msg']
+
+
+def test_create_local_host_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['empty_records']),
+ ('POST', 'name-services/local-hosts', SRR['empty_good'])
+ ])
+ module_args = {
+ 'address': '10.10.10.10',
+ 'owner': 'svm',
+ 'host': 'example.com',
+ 'aliases': ['ex.com', 'ex1.com']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_create_local_host_rest_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['empty_records']),
+ ('POST', 'name-services/local-hosts', SRR['generic_error'])
+ ])
+ module_args = {
+ 'address': '10.10.10.10',
+ 'owner': 'svm',
+ 'host': 'example.com',
+ 'aliases': ['ex.com', 'ex1.com']}
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error creating IP to hostname mappings for svm: calling: name-services/local-hosts: got Expected error.'
+ assert msg in error
+
+
+def test_create_local_host_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['host_record'])
+ ])
+ module_args = {'state': 'present'}
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_local_host():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['host_record']),
+ ('DELETE', 'name-services/local-hosts/e3cb5c7fcd20/10.10.10.10', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_local_host_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['host_record']),
+ ('DELETE', 'name-services/local-hosts/e3cb5c7fcd20/10.10.10.10', SRR['generic_error'])
+ ])
+ module_args = {'state': 'absent'}
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error deleting IP to hostname mappings for svm: calling: name-services/local-hosts/e3cb5c7fcd20/10.10.10.10: got Expected error.'
+ assert msg in error
+
+
+def test_delete_local_host_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['empty_records'])
+ ])
+ module_args = {'state': 'absent'}
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_local_host():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['host_record']),
+ ('PATCH', 'name-services/local-hosts/e3cb5c7fcd20/10.10.10.10', SRR['empty_good'])
+ ])
+ module_args = {
+ 'address': '10.10.10.10',
+ 'owner': 'svm',
+ 'host': 'example1.com',
+ 'aliases': ['ex.com', 'ex1.com', 'ex2.com']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_local_host_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'name-services/local-hosts', SRR['host_record']),
+ ('PATCH', 'name-services/local-hosts/e3cb5c7fcd20/10.10.10.10', SRR['generic_error'])
+ ])
+ module_args = {
+ 'address': '10.10.10.10',
+ 'owner': 'svm',
+ 'host': 'example1.com',
+ 'aliases': ['ex.com', 'ex1.com', 'ex2.com']}
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error updating IP to hostname mappings for svm: calling: name-services/local-hosts/e3cb5c7fcd20/10.10.10.10: got Expected error.'
+ assert msg in error
+
+
+def test_validate_input_ipaddress():
+ register_responses([
+ ])
+ module_args = {'address': '2001:0000:3238:DFE1:63:0000:0000:FEFBSS', 'owner': 'svm'}
+ error = create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error: Invalid IP address value 2001:0000:3238:DFE1:63:0000:0000:FEFBSS'
+ assert msg in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_log_forward.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_log_forward.py
new file mode 100644
index 000000000..5214b76d2
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_log_forward.py
@@ -0,0 +1,343 @@
+''' unit tests for ONTAP Ansible module: na_ontap_log_forward '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_log_forward \
+ import NetAppOntapLogForward as log_forward_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Ooops, the UT needs one more SRR response"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'log_forward_record': (200, {
+ "records": [{
+ "address": "10.11.12.13",
+ "facility": "user",
+ "port": 514,
+ "protocol": "udp_unencrypted",
+ "verify_server": False
+ }]
+ }, None)
+}
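+# 'end_of_sequence' is a sentinel appended to each side_effect list: reaching it means
+# the module issued more requests than the test provided responses for.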
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'log_forward':
+ xml = self.build_log_forward_info()
+ elif self.type == 'log_forward_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_log_forward_info():
+ ''' build xml data for cluster-log-forward-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes': {
+ 'cluster-log-forward-info': {
+ 'destination': '10.11.12.13',
+ 'facility': 'user',
+ 'port': '514',
+ 'protocol': 'udp_unencrypted',
+ 'verify-server': 'false'
+ }
+ }
+ }
+
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
+
+ def set_default_args(self, use_rest=None):
+ # the same default arguments are used whether or not a real system is targeted
+ hostname = '10.10.10.10'
+ username = 'username'
+ password = 'password'
+ destination = '10.11.12.13'
+ port = 514
+ facility = 'user'
+ force = True
+ protocol = 'udp_unencrypted'
+ verify_server = False
+
+ args = dict({
+ 'state': 'present',
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'destination': destination,
+ 'port': port,
+ 'facility': facility,
+ 'force': force,
+ 'protocol': protocol,
+ 'verify_server': verify_server
+ })
+
+ if use_rest is not None:
+ args['use_rest'] = use_rest
+
+ return args
+
+ @staticmethod
+ def get_log_forward_mock_object(cx_type='zapi', kind=None):
+ log_forward_obj = log_forward_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ log_forward_obj.server = MockONTAPConnection()
+ else:
+ log_forward_obj.server = MockONTAPConnection(kind=kind)
+ return log_forward_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ log_forward_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called(self):
+ ''' test get_log_forward_config for non-existent config'''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ print('starting')
+ my_obj = log_forward_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = self.server
+ assert my_obj.get_log_forward_config is not None
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_log_forward_config for existing config'''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = log_forward_module()
+ my_obj.server = MockONTAPConnection(kind='log_forward')
+ assert my_obj.get_log_forward_config()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_log_forward.NetAppOntapLogForward.create_log_forward_config')
+ def test_successful_create(self, create_log_forward_config):
+ ''' creating log_forward config and testing idempotency '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = log_forward_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ create_log_forward_config.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = log_forward_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('log_forward')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_log_forward.NetAppOntapLogForward.destroy_log_forward_config')
+ def test_successful_delete(self, destroy_log_forward):
+ ''' deleting log_forward config and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = log_forward_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('log_forward')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # destroy_log_forward.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = log_forward_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_log_forward.NetAppOntapLogForward.modify_log_forward_config')
+ def test_successful_modify(self, modify_log_forward_config):
+ ''' modifying log_forward config and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['facility'] = 'kern'
+ set_module_args(data)
+ my_obj = log_forward_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('log_forward')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+
+ # modify_log_forward_config.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ data['facility'] = 'user'
+ set_module_args(data)
+ my_obj = log_forward_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('log_forward')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ data = self.set_default_args(use_rest='Never')
+ set_module_args(data)
+ my_obj = log_forward_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('log_forward_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_log_forward_config()
+ assert 'Error creating log forward config with destination ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.destroy_log_forward_config()
+ assert 'Error destroying log forward destination ' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_log_forward_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['msg'] == SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_create_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_log_forward_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_create_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['log_forward_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_log_forward_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_delete_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['log_forward_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_log_forward_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_delete_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_good'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_log_forward_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'present'
+ data['facility'] = 'kern'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['log_forward_record'], # get
+ SRR['empty_good'], # delete
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_log_forward_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'present'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['log_forward_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_log_forward_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_login_messages.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_login_messages.py
new file mode 100644
index 000000000..ac628e8e2
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_login_messages.py
@@ -0,0 +1,332 @@
+# (c) 2019, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_login_messages'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import call_main, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_login_messages import main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+HAS_NETAPP_ZAPI_MSG = "pip install netapp_lib is required"
+
+
+# REST API canned responses when mocking send_request
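+# rest_responses() also supplies shared entries ('is_rest', 'success', 'zero_records', 'generic_error') used below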
+SRR = rest_responses({
+ 'svm_uuid': (200, {"records": [{"uuid": "test_uuid"}], "num_records": 1}, None),
+ 'login_info': (200, {
+ "records": [{
+ "banner": "banner",
+ "message": "message",
+ "show_cluster_message": True,
+ "uuid": "uuid_uuid"
+ }],
+ "num_records": 1}, None),
+ 'login_info_trailing_newline': (200, {
+ "records": [{
+ "banner": "banner\n",
+ "message": "message\n",
+ "show_cluster_message": True,
+ "uuid": "uuid_uuid"
+ }],
+ "num_records": 1}, None),
+})
+
+
+banner_info = {
+ 'num-records': 1,
+ 'attributes-list': [{'vserver-login-banner-info': {
+ 'message': 'banner message',
+ }}]}
+
+
+banner_info_empty = {
+ 'num-records': 1,
+ 'attributes-list': [{'vserver-login-banner-info': {
+ 'message': '-',
+ 'vserver': 'vserver'
+ }}]}
+
+
+motd_info = {
+ 'num-records': 1,
+ 'attributes-list': [{'vserver-motd-info': {
+ 'is-cluster-message-enabled': 'true',
+ 'message': 'motd message',
+ 'vserver': 'vserver'
+ }}]}
+
+
+motd_info_empty = {
+ 'num-records': 1,
+ 'attributes-list': [{'vserver-motd-info': {
+ 'is-cluster-message-enabled': 'true',
+ 'vserver': 'vserver'
+ }}]}
+
+
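+# zapi_responses() also supplies shared entries ('success', 'error', 'no_records') used in the tests below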
+ZRR = zapi_responses({
+ 'banner_info': build_zapi_response(banner_info),
+ 'banner_info_empty': build_zapi_response(banner_info_empty),
+ 'motd_info': build_zapi_response(motd_info),
+ 'motd_info_empty': build_zapi_response(motd_info_empty),
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert "Error: vserver is a required parameter when using ZAPI." == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successfully_create_banner():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ ('ZAPI', 'vserver-login-banner-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-login-banner-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'banner': 'test banner',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_banner_idempotency():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ ('ZAPI', 'vserver-login-banner-get-iter', ZRR['banner_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'banner': 'banner message',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_create_motd():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info_empty']),
+ ('ZAPI', 'vserver-login-banner-get-iter', ZRR['banner_info_empty']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'motd_message': 'test message',
+ 'show_cluster_motd': False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_motd_idempotency():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ ('ZAPI', 'vserver-login-banner-get-iter', ZRR['banner_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'motd_message': 'motd message',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_motd_modify():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ ('ZAPI', 'vserver-login-banner-get-iter', ZRR['banner_info']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'motd_message': 'motd message',
+ 'show_cluster_motd': False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_get_banner_error():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ ('ZAPI', 'vserver-login-banner-get-iter', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == zapi_error_message('Error fetching login_banner info')
+
+
+def test_get_motd_error():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == zapi_error_message('Error fetching motd info')
+
+
+def test_modify_banner_error():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-login-banner-get-iter', ZRR['banner_info']),
+ ('ZAPI', 'vserver-login-banner-modify-iter', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'banner': 'modify to new banner',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == zapi_error_message('Error modifying login_banner')
+
+
+def test_modify_motd_error():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ ('ZAPI', 'vserver-login-banner-get-iter', ZRR['banner_info']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'vserver': 'vserver',
+ 'motd_message': 'modify to new motd',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == zapi_error_message('Error modifying motd')
+
+
+def test_successfully_create_banner_rest():
+ register_responses([
+ # no vserver, cluster scope
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['login_info']),
+ ('PATCH', 'security/login/messages/uuid_uuid', SRR['success']),
+ # with vserver
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['zero_records']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('PATCH', 'security/login/messages/test_uuid', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'banner': 'test banner',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['vserver'] = 'vserver'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_banner_rest():
+ register_responses([
+ # no vserver, cluster scope
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['login_info']),
+ ('PATCH', 'security/login/messages/uuid_uuid', SRR['success']),
+ # idempotent check
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['login_info_trailing_newline'])
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'banner': 'banner\n',
+ 'message': 'message\n',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is False
+
+
+def test_successfully_create_motd_rest():
+ register_responses([
+ # no vserver, cluster scope
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['login_info']),
+ ('PATCH', 'security/login/messages/uuid_uuid', SRR['success']),
+ # with vserver
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['login_info']),
+ ('PATCH', 'security/login/messages/uuid_uuid', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'motd_message': 'test motd',
+ 'show_cluster_motd': False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['vserver'] = 'vserver'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_banner_error_rest():
+ register_responses([
+ # no vserver, cluster scope
+ # error fetching info
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['generic_error']),
+ # error no info at cluster level
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['zero_records']),
+ # with vserver
+ # error fetching SVM UUID
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['zero_records']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ # error, SVM not found
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['zero_records']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ # error, on patch
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/login/messages', SRR['login_info']),
+ ('PATCH', 'security/login/messages/uuid_uuid', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'banner': 'test banner',
+ # 'show_cluster_motd': False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == rest_error_message(
+ 'Error fetching login_banner info', 'security/login/messages')
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == 'Error fetching login_banner info for cluster - no data.'
+ module_args['vserver'] = 'vserver'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == rest_error_message('Error fetching vserver vserver', 'svm/svms')
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] ==\
+ 'Error fetching vserver vserver. Please make sure vserver name is correct. For cluster vserver, don\'t set vserver.'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == rest_error_message(
+ 'Error modifying banner', 'security/login/messages/uuid_uuid')
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun.py
new file mode 100644
index 000000000..5331458e1
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun.py
@@ -0,0 +1,308 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_lun '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_module, expect_and_capture_ansible_exception, patch_ansible
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, build_zapi_response, zapi_error_message, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun import NetAppOntapLUN as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def lun_info(name, next_tag=None):
+ info = {
+ 'num-records': 1,
+ 'attributes-list': [{
+ 'lun_info': {
+ 'path': "/what/ever/%s" % name,
+ 'size': 5368709120,
+ 'is-space-alloc-enabled': "false",
+ 'is-space-reservation-enabled': "true",
+ 'multiprotocol-type': 'linux',
+ 'qos-policy-group': 'qospol',
+ 'qos-adaptive-policy-group': 'qosadppol',
+ }
+ }]
+ }
+ if next_tag:
+ info['next-tag'] = next_tag
+ return info
+
+
+ZRR = zapi_responses({
+ 'lun_info': build_zapi_response(lun_info('lun_name')),
+ 'lun_info_from': build_zapi_response(lun_info('lun_from_name')),
+ 'lun_info_with_tag': build_zapi_response(lun_info('lun_name', 'more to come')),
+ 'error_9042': build_zapi_error(9042, 'new size == old size, more or less'),
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'use_rest',
+ 'name': 'lun_name',
+ 'vserver': 'lunsvm_name',
+}
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never'
+ }
+ print('Info: %s' % call_main(my_main, {}, module_args, fail=True)['msg'])
+
+
+def test_create_error_missing_param():
+    ''' Test that create fails when required params 'flexvol_name' and 'size' are missing '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ msg = "Error: 'flexvol_name' option is required when using ZAPI."
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['flexvol_name'] = 'xxx'
+ msg = 'size is a required parameter for create.'
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['no_records']),
+ ('ZAPI', 'lun-create-by-size', ZRR['success']),
+ # second create
+ ('ZAPI', 'lun-get-iter', ZRR['no_records']),
+ ('ZAPI', 'lun-create-by-size', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'comment': 'some comment',
+ 'flexvol_name': 'vol_name',
+ 'qos_adaptive_policy_group': 'new_adaptive_pol',
+ 'size': 5,
+ 'space_allocation': False,
+ 'space_reserve': False,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ 'use_rest': 'never',
+ 'comment': 'some comment',
+ 'flexvol_name': 'vol_name',
+ 'os_type': 'windows',
+ 'qos_policy_group': 'new_pol',
+ 'size': 5,
+ 'space_allocation': False,
+ 'space_reserve': False,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_rename_idempotency():
+ ''' Test create idempotency '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'flexvol_name': 'vol_name',
+ 'size': 5,
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_lun():
+ ''' Test delete and idempotency '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ('ZAPI', 'lun-destroy', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'lun-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'flexvol_name': 'vol_name',
+ 'state': 'absent',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+    # reuse the same module_args for the idempotency check
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_lun_no_input():
+ ''' Nothing to delete! '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'state': 'absent',
+ }
+ msg = "Error: 'flexvol_name' option is required when using ZAPI."
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successful_resize():
+ ''' Test successful resize '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ('ZAPI', 'lun-resize', ZRR['success']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ('ZAPI', 'lun-resize', ZRR['error_9042']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'flexvol_name': 'vol_name',
+ 'size': 7
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify():
+ ''' Test successful modify '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ('ZAPI', 'lun-set-comment', ZRR['success']),
+ ('ZAPI', 'lun-set-qos-policy-group', ZRR['success']),
+ ('ZAPI', 'lun-set-space-alloc', ZRR['success']),
+ # second call
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ('ZAPI', 'lun-set-comment', ZRR['success']),
+ ('ZAPI', 'lun-set-qos-policy-group', ZRR['success']),
+ ('ZAPI', 'lun-set-space-reservation-info', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'comment': 'some comment',
+ 'flexvol_name': 'vol_name',
+ 'qos_policy_group': 'new_pol',
+ 'space_allocation': True,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ 'use_rest': 'never',
+ 'comment': 'some comment',
+ 'flexvol_name': 'vol_name',
+ 'qos_adaptive_policy_group': 'new_adaptive_pol',
+ 'space_allocation': False,
+ 'space_reserve': False,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_modify():
+    ''' Test that os_type cannot be modified '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'flexvol_name': 'vol_name',
+ 'comment': 'some comment',
+ 'os_type': 'windows',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == 'os_type cannot be modified: current: linux, desired: windows'
+
+
+def test_successful_rename():
+    ''' Test successful rename '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['no_records']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info_from']),
+ ('ZAPI', 'lun-move', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'flexvol_name': 'vol_name',
+ 'from_name': 'lun_from_name'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_failed_rename():
+ ''' Test failed rename '''
+ register_responses([
+ ('ZAPI', 'lun-get-iter', ZRR['no_records']),
+ ('ZAPI', 'lun-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'flexvol_name': 'vol_name',
+ 'from_name': 'lun_from_name'
+ }
+ msg = 'Error renaming lun: lun_from_name does not exist'
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_zapi_errors():
+ register_responses([
+ # get error
+ ('ZAPI', 'lun-get-iter', ZRR['error']),
+ # error on next tag
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info_with_tag']),
+ ('ZAPI', 'lun-get-iter', ZRR['lun_info_with_tag']),
+ ('ZAPI', 'lun-get-iter', ZRR['error']),
+ # create error
+ ('ZAPI', 'lun-create-by-size', ZRR['error']),
+ # resize error
+ ('ZAPI', 'lun-resize', ZRR['error']),
+ # rename error
+ ('ZAPI', 'lun-move', ZRR['error']),
+ # modify error
+ ('ZAPI', 'lun-set-space-reservation-info', ZRR['error']),
+ # delete error
+ ('ZAPI', 'lun-destroy', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'flexvol_name': 'vol_name',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ msg = 'Error fetching luns for vol_name'
+ assert zapi_error_message(msg) == expect_and_capture_ansible_exception(my_obj.get_luns, 'fail')['msg']
+ assert zapi_error_message(msg) == expect_and_capture_ansible_exception(my_obj.get_luns, 'fail')['msg']
+
+ my_obj.parameters['size'] = 123456
+ msg = 'Error provisioning lun lun_name of size 123456'
+ assert zapi_error_message(msg) == expect_and_capture_ansible_exception(my_obj.create_lun, 'fail')['msg']
+
+ msg = 'Error resizing lun path'
+ assert zapi_error_message(msg) == expect_and_capture_ansible_exception(my_obj.resize_lun, 'fail', 'path')['msg']
+
+ my_obj.parameters.pop('size')
+ msg = 'Error moving lun old_path'
+ assert zapi_error_message(msg) == expect_and_capture_ansible_exception(my_obj.rename_lun, 'fail', 'old_path', 'new_path')['msg']
+
+ msg = 'Error setting lun option space_reserve'
+ assert zapi_error_message(msg) == expect_and_capture_ansible_exception(my_obj.modify_lun, 'fail', 'path', {'space_reserve': True})['msg']
+
+ msg = 'Error deleting lun path'
+ assert zapi_error_message(msg) == expect_and_capture_ansible_exception(my_obj.delete_lun, 'fail', 'path')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_app_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_app_rest.py
new file mode 100644
index 000000000..0a874f2a6
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_app_rest.py
@@ -0,0 +1,584 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_lun (san_application_template, REST) '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import copy
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock, call
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible, assert_warning_was_raised, print_warnings
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun \
+ import NetAppOntapLUN as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+# REST API canned responses when mocking send_request
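+# each entry is a (status_code, body, error) tuple, as returned by the mocked send_request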
+SRR = {
+ # common responses
+ 'is_rest_96': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_97': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy')), None),
+ 'is_rest_98': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {'records': []}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_apps_empty': (200,
+ {'records': [],
+ 'num_records': 0
+ },
+ None
+ ),
+ 'get_apps_found': (200,
+ {'records': [dict(name='san_appli', uuid='1234')],
+ 'num_records': 1
+ },
+ None
+ ),
+ 'get_app_components': (200,
+ {'records': [dict(name='san_appli', uuid='1234')],
+ 'num_records': 1
+ },
+ None
+ ),
+ 'get_app_details': (200,
+ dict(name='san_appli', uuid='1234',
+ san=dict(application_components=[dict(name='lun_name', lun_count=3, total_size=1000)]),
+ statistics=dict(space=dict(provisioned=1100))
+ ),
+ None
+ ),
+ 'get_app_component_details': (200,
+ {'backing_storage': dict(luns=[]),
+ },
+ None
+ ),
+ 'get_volumes_found': (200,
+ {'records': [dict(name='san_appli', uuid='1234')],
+ 'num_records': 1
+ },
+ None
+ ),
+ 'get_lun_path': (200,
+ {'records': [{'uuid': '1234', 'path': '/vol/lun_name/lun_name'}],
+ 'num_records': 1
+ },
+ None
+ ),
+ 'one_lun': (200,
+ {'records': [{
+ 'uuid': "1234",
+ 'name': '/vol/lun_name/lun_name',
+ 'path': '/vol/lun_name/lun_name',
+ 'size': 9871360,
+ 'comment': None,
+ 'flexvol_name': None,
+ 'os_type': 'xyz',
+ 'qos_policy_group': None,
+ 'space_reserve': False,
+ 'space_allocation': False
+ }],
+ }, None),
+ 'get_storage': (200,
+ {'backing_storage': dict(luns=[{'path': '/vol/lun_name/lun_name',
+ 'uuid': '1234',
+ 'size': 15728640,
+ 'creation_timestamp': '2022-07-26T20:35:50+00:00'
+ }]),
+ }, None),
+
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, parm1=None):
+ ''' save arguments '''
+ self.type = kind
+ self.parm1 = parm1
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'lun':
+ xml = self.build_lun_info(self.parm1)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_lun_info(lun_name):
+ ''' build xml data for lun-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ lun = dict(
+ lun_info=dict(
+ path="/what/ever/%s" % lun_name,
+ size=10
+ )
+ )
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': [lun]
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.mock_lun_args = {
+ 'vserver': 'ansible',
+ 'name': 'lun_name',
+ 'flexvol_name': 'vol_name',
+ 'state': 'present'
+ }
+
+ def mock_args(self):
+ return {
+ 'vserver': self.mock_lun_args['vserver'],
+ 'name': self.mock_lun_args['name'],
+ 'flexvol_name': self.mock_lun_args['flexvol_name'],
+ 'state': self.mock_lun_args['state'],
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ }
+ # self.server = MockONTAPConnection()
+
+ def get_lun_mock_object(self, kind=None, parm1=None):
+ """
+ Helper method to return an na_ontap_lun object
+ :param kind: passes this param to MockONTAPConnection()
+        :return: na_ontap_lun object
+ """
+ lun_obj = my_module()
+ lun_obj.autosupport_log = Mock(return_value=None)
+ lun_obj.server = MockONTAPConnection(kind=kind, parm1=parm1)
+ return lun_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_create_error_missing_param(self):
+        ''' Test that create fails when required parameter 'size' is not specified '''
+ data = self.mock_args()
+ set_module_args(data)
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli')
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_lun_mock_object().apply()
+ msg = 'size is a required parameter for create.'
+ assert msg == exc.value.args[0]['msg']
+
+ def test_create_error_missing_param2(self):
+        ''' Test that create fails when 'name' is missing in san_application_template '''
+ data = self.mock_args()
+ data.pop('flexvol_name')
+ data['size'] = 5
+ data['san_application_template'] = dict(lun_count=6)
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_lun_mock_object().apply()
+ msg = 'missing required arguments: name found in san_application_template'
+ assert msg == exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_create_appli(self, mock_request):
+ ''' Test successful create '''
+ mock_request.side_effect = [
+ SRR['is_rest_98'],
+ SRR['get_apps_empty'], # GET application/applications
+ SRR['get_apps_empty'], # GET volumes
+ SRR['empty_good'], # POST application/applications
+ SRR['end_of_sequence']
+ ]
+ data = dict(self.mock_args())
+ data['size'] = 5
+ data.pop('flexvol_name')
+ tiering = dict(control='required')
+ data['san_application_template'] = dict(name='san_appli', tiering=tiering)
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_lun_mock_object().apply()
+ assert exc.value.args[0]['changed']
+ expected_json = {'name': 'san_appli', 'svm': {'name': 'ansible'}, 'smart_container': True,
+ 'san': {'application_components':
+ [{'name': 'lun_name', 'lun_count': 1, 'total_size': 5368709120, 'tiering': {'control': 'required'}}]}}
+ expected_call = call(
+ 'POST', 'application/applications', {'return_timeout': 30, 'return_records': 'true'}, json=expected_json, headers=None, files=None)
+ assert expected_call in mock_request.mock_calls
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_create_appli_idem(self, mock_request):
+ ''' Test successful create idempotent '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data['size'] = 5
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli')
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_lun_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_create_appli_idem_no_comp(self, mock_request):
+        ''' Test that an error is reported when the application exists but has no component '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ SRR['get_apps_empty'], # GET application/applications/<uuid>/components
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data['size'] = 5
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli')
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_lun_mock_object().apply()
+ # print(mock_request.call_args_list)
+ msg = 'Error: no component for application san_appli'
+ assert msg == exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_delete_appli(self, mock_request):
+        ''' Test successful delete '''
+ mock_request.side_effect = [
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['empty_good'], # POST application/applications
+ SRR['end_of_sequence']
+ ]
+ data = dict(self.mock_args())
+ data['size'] = 5
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli')
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_lun_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_delete_appli_idem(self, mock_request):
+ ''' Test successful delete idempotent '''
+ mock_request.side_effect = [
+ SRR['is_rest_98'],
+ SRR['get_apps_empty'], # GET application/applications
+ SRR['end_of_sequence']
+ ]
+ data = dict(self.mock_args())
+ data['size'] = 5
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli')
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_lun_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_modify_appli(self, mock_request):
+ ''' Test successful modify application '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['empty_good'],
+ SRR['get_lun_path'],
+ SRR['get_storage'],
+ SRR['one_lun'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data['os_type'] = 'xyz'
+ data['space_reserve'] = True
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', lun_count=5, total_size=1000, igroup_name='abc')
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_lun_mock_object().apply()
+ print(exc.value.args[0])
+ # print(mock_request.call_args_list)
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_error_modify_appli_missing_igroup(self, mock_request):
+        ''' Test that modify fails when igroup_name, os_type and total_size are missing while increasing lun_count '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ # SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ # SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data['size'] = 5
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', lun_count=5)
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_lun_mock_object().apply()
+ msg = 'Error: igroup_name is a required parameter when increasing lun_count.'
+ assert msg in exc.value.args[0]['msg']
+ msg = 'Error: total_size is a required parameter when increasing lun_count.'
+ assert msg in exc.value.args[0]['msg']
+ msg = 'Error: os_type is a required parameter when increasing lun_count.'
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_no_action(self, mock_request):
+        ''' Test that no action is taken when no change is required '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data['name'] = 'unknown'
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', lun_count=5)
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_lun_mock_object().apply()
+ print(exc.value.args[0])
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_no_96(self, mock_request):
+ ''' Test SAN application not supported on 9.6 '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_96'],
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data['name'] = 'unknown'
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', lun_count=5)
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_lun_mock_object().apply()
+ print(exc.value.args[0]['msg'])
+ msg = 'Error: using san_application_template requires ONTAP 9.7 or later and REST must be enabled.'
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_no_modify_on97(self, mock_request):
+ ''' Test modify SAN application not supported on 9.7 '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_97'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data.pop('flexvol_name')
+ data['os_type'] = 'xyz'
+ data['san_application_template'] = dict(name='san_appli', lun_count=5, total_size=1000, igroup_name='abc')
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_lun_mock_object().apply()
+ print(exc.value.args[0])
+ msg = 'Error: modifying lun_count, total_size is not supported on ONTAP 9.7'
+ # in python 2.6, keys() is not sorted!
+ msg2 = 'Error: modifying total_size, lun_count is not supported on ONTAP 9.7'
+ assert msg in exc.value.args[0]['msg'] or msg2 in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_no_modify_on97_2(self, mock_request):
+ ''' Test modify SAN application not supported on 9.7 '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_97'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', total_size=1000)
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_lun_mock_object().apply()
+ print(exc.value.args[0])
+ msg = 'Error: modifying total_size is not supported on ONTAP 9.7'
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_app_changes_reduction_not_allowed(self, mock_request):
+ ''' Test modify SAN application - can't decrease size '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ # SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ # SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', total_size=899, total_size_unit='b')
+ set_module_args(data)
+ lun_object = self.get_lun_mock_object()
+ with pytest.raises(AnsibleFailJson) as exc:
+ lun_object.app_changes('scope')
+ msg = "Error: can't reduce size: total_size=1000, provisioned=1100, requested=899"
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_app_changes_reduction_small_enough_10(self, mock_request):
+ ''' Test modify SAN application - a 10% reduction is ignored '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ # SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ # SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', total_size=900, total_size_unit='b')
+ set_module_args(data)
+ lun_object = self.get_lun_mock_object()
+ results = lun_object.app_changes('scope')
+ print(results)
+ print(lun_object.debug)
+ msg = "Ignoring small reduction (10.0 %) in total size: total_size=1000, provisioned=1100, requested=900"
+ assert_warning_was_raised(msg)
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_app_changes_reduction_small_enough_17(self, mock_request):
+ ''' Test modify SAN application - a 1.7% reduction is ignored '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ # SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ # SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', total_size=983, total_size_unit='b')
+ set_module_args(data)
+ lun_object = self.get_lun_mock_object()
+ results = lun_object.app_changes('scope')
+ print(results)
+ print(lun_object.debug)
+ print_warnings()
+ msg = "Ignoring small reduction (1.7 %) in total size: total_size=1000, provisioned=1100, requested=983"
+ assert_warning_was_raised(msg)
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_app_changes_increase_small_enough(self, mock_request):
+        ''' Test modify SAN application - an increase smaller than the provisioned size is ignored '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ # SRR['get_apps_found'], # GET application/applications/<uuid>/components
+ # SRR['get_app_component_details'], # GET application/applications/<uuid>/components/<cuuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data.pop('flexvol_name')
+ data['san_application_template'] = dict(name='san_appli', total_size=1050, total_size_unit='b')
+ set_module_args(data)
+ lun_object = self.get_lun_mock_object()
+ results = lun_object.app_changes('scope')
+ print(results)
+ print(lun_object.debug)
+ msg = "Ignoring increase: requested size is too small: total_size=1000, provisioned=1100, requested=1050"
+ assert_warning_was_raised(msg)
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_convert_to_appli(self, mock_request):
+ ''' Test successful convert to application
+ Appli does not exist, but the volume does.
+ '''
+ mock_request.side_effect = copy.deepcopy([
+ SRR['is_rest_98'],
+ SRR['get_apps_empty'], # GET application/applications
+ SRR['get_volumes_found'], # GET volumes
+ SRR['empty_good'], # POST application/applications
+ SRR['get_apps_found'], # GET application/applications
+ SRR['get_app_details'], # GET application/applications/<uuid>
+ SRR['end_of_sequence']
+ ])
+ data = dict(self.mock_args())
+ data['size'] = 5
+ data.pop('flexvol_name')
+ tiering = dict(control='required')
+ data['san_application_template'] = dict(name='san_appli', tiering=tiering, scope='application')
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_lun_mock_object().apply()
+ # assert exc.value.args[0]['changed']
+ print(mock_request.mock_calls)
+ print(exc.value.args[0])
+ expected_json = {'name': 'san_appli', 'svm': {'name': 'ansible'}, 'smart_container': True,
+ 'san': {'application_components':
+ [{'name': 'lun_name'}]}}
+ expected_call = call(
+ 'POST', 'application/applications', {'return_timeout': 30, 'return_records': 'true'}, json=expected_json, headers=None, files=None)
+ assert expected_call in mock_request.mock_calls
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_negative_convert_to_appli(self, mock_request):
+        ''' Test failed convert to application on ONTAP 9.7
+ Appli does not exist, but the volume does.
+ '''
+ mock_request.side_effect = [
+ SRR['is_rest_97'],
+ SRR['get_apps_empty'], # GET application/applications
+ SRR['get_volumes_found'], # GET volumes
+ SRR['end_of_sequence']
+ ]
+ data = dict(self.mock_args())
+ data['size'] = 5
+ data.pop('flexvol_name')
+ tiering = dict(control='required')
+ data['san_application_template'] = dict(name='san_appli', tiering=tiering, scope='application')
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_lun_mock_object().apply()
+ msg = "Error: converting a LUN volume to a SAN application container requires ONTAP 9.8 or better."
+ assert msg in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_copy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_copy.py
new file mode 100644
index 000000000..93d446809
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_copy.py
@@ -0,0 +1,113 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_lun_copy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ create_module, create_and_apply, expect_and_capture_ansible_exception
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun_copy \
+ import NetAppOntapLUNCopy as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+    pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+DEFAULT_ARGS = {
+ 'source_vserver': 'ansible',
+ 'destination_path': '/vol/test/test_copy_dest_dest_new_reviewd_new',
+ 'source_path': '/vol/test/test_copy_1',
+ 'destination_vserver': 'ansible',
+ 'state': 'present',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'never'
+}
+
+
+ZRR = zapi_responses({
+ 'lun_info': build_zapi_response({'num-records': 1})
+})
+
+
+SRR = rest_responses({
+ 'lun_info': (200, {"records": [{
+ "name": "/vol/vol0/lun1_10"
+ }], "num_records": 1}, None)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "destination_vserver", "destination_path", "source_path"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_create_error_missing_param():
+ ''' Test if create throws an error if required param 'destination_vserver' is not specified'''
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['destination_vserver']
+ msg = 'missing required arguments: destination_vserver'
+ assert msg in create_module(my_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+
+
+def test_successful_copy():
+    ''' Test successful copy and idempotency check '''
+ register_responses([
+ ('lun-get-iter', ZRR['empty']),
+ ('lun-copy-start', ZRR['success']),
+ ('lun-get-iter', ZRR['lun_info'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('lun-get-iter', ZRR['error']),
+ ('lun-copy-start', ZRR['error']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/luns', SRR['generic_error']),
+ ('POST', 'storage/luns', SRR['generic_error']),
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/luns', SRR['empty_records'])
+ ])
+ lun_obj = create_module(my_module, DEFAULT_ARGS)
+ assert 'Error getting lun info' in expect_and_capture_ansible_exception(lun_obj.get_lun, 'fail')['msg']
+ assert 'Error copying lun from' in expect_and_capture_ansible_exception(lun_obj.copy_lun, 'fail')['msg']
+ lun_obj = create_module(my_module, DEFAULT_ARGS, {'use_rest': 'always'})
+ assert 'Error getting lun info' in expect_and_capture_ansible_exception(lun_obj.get_lun_rest, 'fail')['msg']
+ assert 'Error copying lun from' in expect_and_capture_ansible_exception(lun_obj.copy_lun_rest, 'fail')['msg']
+ assert 'REST requires ONTAP 9.10.1 or later' in create_module(my_module, DEFAULT_ARGS, {'use_rest': 'always'}, fail=True)['msg']
+ args = {'use_rest': 'always', 'destination_vserver': 'some_vserver'}
+ assert 'REST does not supports inter-Vserver lun copy' in create_and_apply(my_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_successful_copy_rest():
+    ''' Test successful copy and idempotency check in REST '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('POST', 'storage/luns', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/luns', SRR['lun_info']),
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map.py
new file mode 100644
index 000000000..120e5f7b3
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map.py
@@ -0,0 +1,159 @@
+''' unit tests ONTAP Ansible module: na_ontap_lun_map '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun_map \
+ import NetAppOntapLUNMap as my_module
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'lun_map':
+ xml = self.build_lun_info()
+ elif self.type == 'lun_map_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_lun_info():
+ ''' build xml data for lun-map-entry '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'initiator-groups': [{'initiator-group-info': {'initiator-group-name': 'ansible', 'lun-id': 2}}]}
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
+
+ def set_default_args(self):
+ if self.onbox:
+ hostname = '10.10.10.10'
+ username = 'admin'
+ password = 'password'
+ initiator_group_name = 'ansible'
+ vserver = 'ansible'
+ path = '/vol/ansible/test'
+ lun_id = 2
+ else:
+ hostname = 'hostname'
+ username = 'username'
+ password = 'password'
+ initiator_group_name = 'ansible'
+ vserver = 'ansible'
+ path = '/vol/ansible/test'
+ lun_id = 2
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'initiator_group_name': initiator_group_name,
+ 'vserver': vserver,
+ 'path': path,
+ 'lun_id': lun_id,
+ 'use_rest': 'false'
+ })
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called(self):
+ ''' test get_lun_map for non-existent lun'''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = self.server
+ assert my_obj.get_lun_map is not None
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_lun_map for existing lun'''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection(kind='lun_map')
+ assert my_obj.get_lun_map()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun_map.NetAppOntapLUNMap.create_lun_map')
+ def test_successful_create(self, create_lun_map):
+ ''' mapping lun and testing idempotency '''
+ data = self.set_default_args()
+ set_module_args(data)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ create_lun_map.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('lun_map')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun_map.NetAppOntapLUNMap.delete_lun_map')
+ def test_successful_delete(self, delete_lun_map):
+ ''' unmapping lun and testing idempotency '''
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('lun_map')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ delete_lun_map.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('lun_map_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_lun_map()
+ assert 'Error mapping lun' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_lun_map()
+ assert 'Error unmapping lun' in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map_reporting_nodes.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map_reporting_nodes.py
new file mode 100644
index 000000000..e09016eda
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map_reporting_nodes.py
@@ -0,0 +1,170 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for the ONTAP lun map reporting nodes Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ create_module, create_and_apply, expect_and_capture_ansible_exception
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun_map_reporting_nodes \
+ import NetAppOntapLUNMapReportingNodes as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'initiator_group_name': 'igroup1',
+ "path": "/vol/lun1/lun1_1",
+ "vserver": "svm1",
+ 'nodes': 'ontap910-01',
+ 'state': 'present',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'never'
+}
+
+
+node_info = {
+ 'num-records': "1",
+ 'attributes-list': {
+ 'lun-map-info': {
+ 'reporting-nodes': [{"node-name": "ontap910-01"}]
+ }
+ }
+}
+
+
+nodes_info = {
+ 'num-records': "1",
+ 'attributes-list': {
+ 'lun-map-info': {
+ 'reporting-nodes': [{"node-name": "ontap910-01"}, {"node-name": "ontap910-02"}]
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'node_info': build_zapi_response(node_info),
+ 'nodes_info': build_zapi_response(nodes_info)
+})
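+# zapi_responses also provides the shared 'success', 'no_records' and 'error' entries used by the tests below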
+
+
+SRR = rest_responses({
+ 'node_info': (200, {"records": [{
+ "svm": {"name": "svm1"},
+ "lun": {"uuid": "ea78ec41", "name": "/vol/ansibleLUN/ansibleLUN"},
+ "igroup": {"uuid": "8b8aa177", "name": "testme_igroup"},
+ "reporting_nodes": [{"uuid": "20f6b3d5", "name": "ontap910-01"}]
+ }], "num_records": 1}, None),
+ 'nodes_info': (200, {"records": [{
+ "svm": {"name": "svm1"},
+ "lun": {"uuid": "ea78ec41", "name": "/vol/ansibleLUN/ansibleLUN"},
+ "igroup": {"uuid": "8b8aa177", "name": "testme_igroup"},
+ "reporting_nodes": [{"uuid": "20f6b3d5", "name": "ontap910-01"}, {"uuid": "20f6b3d6", "name": "ontap910-02"}]
+ }], "num_records": 1}, None)
+})
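+# rest_responses likewise supplies shared entries such as 'is_rest_9_10_1', 'is_rest_9_9_1', 'generic_error' and 'success' used below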
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "initiator_group_name", "vserver", "path", "nodes"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_successful_add_node():
+ ''' Test successful add and idempotent check '''
+ register_responses([
+ ('lun-map-get-iter', ZRR['node_info']),
+ ('lun-map-add-reporting-nodes', ZRR['success']),
+ ('lun-map-get-iter', ZRR['nodes_info']),
+ ])
+ args = {'nodes': ['ontap910-01', 'ontap910-02']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_remove_node():
+ ''' Test successful remove and idempotent check '''
+ register_responses([
+ ('lun-map-get-iter', ZRR['nodes_info']),
+ ('lun-map-remove-reporting-nodes', ZRR['success']),
+ ('lun-map-get-iter', ZRR['node_info']),
+ ])
+ args = {'nodes': 'ontap910-02', 'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('lun-map-get-iter', ZRR['no_records']),
+ ('lun-map-get-iter', ZRR['error']),
+ ('lun-map-add-reporting-nodes', ZRR['error']),
+ ('lun-map-remove-reporting-nodes', ZRR['error']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/san/lun-maps', SRR['generic_error']),
+ ('POST', 'protocols/san/lun-maps/3edf6t/3edf62/reporting-nodes', SRR['generic_error']),
+ ('DELETE', 'protocols/san/lun-maps/3edf6t/3edf62/reporting-nodes/3dr567', SRR['generic_error']),
+ ('GET', 'cluster', SRR['is_rest_9_9_1'])
+ ])
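+ # the ZAPI errors are consumed by the first module instance, the REST errors by the second one created with use_rest: always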
+ node_obj = create_module(my_module, DEFAULT_ARGS)
+ assert 'Error: LUN map not found' in expect_and_capture_ansible_exception(node_obj.apply, 'fail')['msg']
+ assert 'Error getting LUN' in expect_and_capture_ansible_exception(node_obj.get_lun_map_reporting_nodes, 'fail')['msg']
+ assert 'Error creating LUN map reporting nodes' in expect_and_capture_ansible_exception(node_obj.add_lun_map_reporting_nodes, 'fail', 'node1')['msg']
+ assert 'Error deleting LUN map reporting node' in expect_and_capture_ansible_exception(node_obj.remove_lun_map_reporting_nodes, 'fail', 'node1')['msg']
+
+ node_obj = create_module(my_module, DEFAULT_ARGS, {'use_rest': 'always'})
+ node_obj.lun_uuid, node_obj.igroup_uuid = '3edf6t', '3edf62'
+ node_obj.nodes_uuids = {'node1': '3dr567'}
+ assert 'Error getting LUN' in expect_and_capture_ansible_exception(node_obj.get_lun_map_reporting_nodes, 'fail')['msg']
+ assert 'Error creating LUN map reporting node' in expect_and_capture_ansible_exception(node_obj.add_lun_map_reporting_nodes_rest, 'fail', 'node1')['msg']
+ assert 'Error deleting LUN map reporting node' in expect_and_capture_ansible_exception(node_obj.remove_lun_map_reporting_nodes_rest, 'fail', 'node1')['msg']
+ assert 'REST requires ONTAP 9.10.1 or later' in create_module(my_module, DEFAULT_ARGS, {'use_rest': 'always'}, fail=True)['msg']
+
+
+def test_successful_add_node_rest():
+ ''' Test successful add and idempotent check '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/san/lun-maps', SRR['node_info']),
+ ('POST', 'protocols/san/lun-maps/ea78ec41/8b8aa177/reporting-nodes', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/san/lun-maps', SRR['nodes_info'])
+ ])
+ args = {'nodes': ['ontap910-01', 'ontap910-02'], 'use_rest': 'always'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_remove_node_rest():
+ ''' Test successful remove and idempotent check '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/san/lun-maps', SRR['nodes_info']),
+ ('DELETE', 'protocols/san/lun-maps/ea78ec41/8b8aa177/reporting-nodes/20f6b3d6', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/san/lun-maps', SRR['node_info'])
+ ])
+ args = {'nodes': 'ontap910-02', 'state': 'absent', 'use_rest': 'always'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map_rest.py
new file mode 100644
index 000000000..1881ee37a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_map_rest.py
@@ -0,0 +1,200 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun_map \
+ import NetAppOntapLUNMap as my_module, main as my_main # module under test
+
+# needed for get and modify/delete as they still use ZAPI
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+# REST API canned responses when mocking send_request
+
+SRR = rest_responses({
+ 'lun': (200, {"records": [
+ {
+ "uuid": "2f030603-3daa-4e19-9888-f9c3ac9a9117",
+ "name": "/vol/ansibleLUN_vol1/ansibleLUN",
+ "os_type": "linux",
+ "serial_number": "wOpku+Rjd-YL",
+ "space": {
+ "size": 5242880
+ },
+ "status": {
+ "state": "online"
+ }
+ }]}, None),
+ 'lun_map': (200, {"records": [
+ {
+ "igroup": {
+ "uuid": "1ad8544d-8cd1-91e0-9e1c-723478563412",
+ "name": "igroup1",
+ },
+ "logical_unit_number": 1,
+ "lun": {
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412",
+ "name": "this/is/a/path",
+ },
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ }
+ }
+ ]}, None)
+})
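+# the lun and igroup uuids in 'lun_map' are the ones expected in the DELETE path asserted in the delete tests below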
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'path': 'this/is/a/path',
+ 'initiator_group_name': 'igroup1',
+ 'vserver': 'svm1',
+ 'use_rest': 'always',
+}
+
+
+def test_get_lun_map_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/san/lun-maps', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_lun_map_rest() is None
+
+
+def test_get_lun_map_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/san/lun-maps', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error getting lun_map this/is/a/path: calling: protocols/san/lun-maps: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_lun_map_rest, 'fail')['msg']
+
+
+def test_get_lun_map_one_record():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/san/lun-maps', SRR['lun_map'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_lun_map_rest() is not None
+
+
+def test_get_lun_one_record():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['lun'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_lun_rest() is not None
+
+
+def test_get_lun_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error getting lun this/is/a/path: calling: storage/luns: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_lun_rest, 'fail')['msg']
+
+
+def test_create_lun_map():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('GET', 'protocols/san/lun-maps', SRR['empty_records']),
+ ('POST', 'protocols/san/lun-maps', SRR['empty_good'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {})['changed']
+
+
+def test_create_lun_map_with_lun_id():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('GET', 'protocols/san/lun-maps', SRR['empty_records']),
+ ('POST', 'protocols/san/lun-maps', SRR['empty_good'])
+ ])
+ module_args = {'lun_id': '1'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_lun_map_with_lun_id_idempotent():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['lun']),
+ ('GET', 'protocols/san/lun-maps', SRR['lun_map'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'lun_id': '1'})['changed'] is False
+
+
+def test_create_lun_map_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'protocols/san/lun-maps', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error creating lun_map this/is/a/path: calling: protocols/san/lun-maps: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.create_lun_map_rest, 'fail')['msg']
+
+
+def test_delete_lun_map():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('GET', 'protocols/san/lun-maps', SRR['lun_map']),
+ ('DELETE', 'protocols/san/lun-maps/1cd8a442-86d1-11e0-ae1c-123478563412/1ad8544d-8cd1-91e0-9e1c-723478563412',
+ SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_lun_map_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('GET', 'protocols/san/lun-maps', SRR['lun_map']),
+ ])
+ module_args = {'initiator_group_name': 'new name'}
+ msg = 'Modification of lun_map not allowed'
+ assert msg in create_and_apply(my_module, DEFAULT_ARGS, module_args, 'fail')['msg']
+
+
+def test_delete_lun_map_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('DELETE', 'protocols/san/lun-maps/1cd8a442-86d1-11e0-ae1c-123478563412/1ad8544d-8cd1-91e0-9e1c-723478563412',
+ SRR['generic_error'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ my_obj.parameters['state'] = 'absent'
+ my_obj.igroup_uuid = '1ad8544d-8cd1-91e0-9e1c-723478563412'
+ my_obj.lun_uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ msg = 'Error deleting lun_map this/is/a/path: calling: ' \
+ 'protocols/san/lun-maps/1cd8a442-86d1-11e0-ae1c-123478563412/1ad8544d-8cd1-91e0-9e1c-723478563412: got Expected error.'
+ assert msg == expect_and_capture_ansible_exception(my_obj.delete_lun_map_rest, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_rest.py
new file mode 100644
index 000000000..fd65062d0
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_lun_rest.py
@@ -0,0 +1,558 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_lun \
+ import NetAppOntapLUN as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'one_lun': (200, {
+ "records": [
+ {
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412",
+ "qos_policy": {
+ "name": "qos1",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ },
+ "os_type": "aix",
+ "enabled": True,
+ "location": {
+ "volume": {
+ "name": "volume1",
+ "uuid": "028baa66-41bd-11e9-81d5-00a0986138f7"
+ },
+ },
+ "name": "/vol/volume1/qtree1/lun1",
+ "space": {
+ "scsi_thin_provisioning_support_enabled": True,
+ "guarantee": {
+ "requested": True,
+ },
+ "size": 1073741824
+ },
+ "lun_maps": [
+ {
+ "igroup": {
+ "name": "igroup1",
+ "uuid": "4ea7a442-86d1-11e0-ae1c-123478563412"
+ },
+ "logical_unit_number": 0,
+ }
+ ],
+ "comment": "string",
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ }
+ ],
+ }, None),
+ 'two_luns': (200, {
+ "records": [
+ {
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412",
+ "qos_policy": {
+ "name": "qos1",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ },
+ "os_type": "aix",
+ "enabled": True,
+ "location": {
+ "volume": {
+ "name": "volume1",
+ "uuid": "028baa66-41bd-11e9-81d5-00a0986138f7"
+ },
+ },
+ "name": "/vol/volume1/qtree1/lun1",
+ "space": {
+ "scsi_thin_provisioning_support_enabled": True,
+ "guarantee": {
+ "requested": True,
+ },
+ "size": 1073741824
+ },
+ "lun_maps": [
+ {
+ "igroup": {
+ "name": "igroup1",
+ "uuid": "4ea7a442-86d1-11e0-ae1c-123478563412"
+ },
+ "logical_unit_number": 0,
+ }
+ ],
+ "comment": "string",
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ },
+ {
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563413",
+ "qos_policy": {
+ "name": "qos2",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563413"
+ },
+ "os_type": "aix",
+ "enabled": True,
+ "location": {
+ "volume": {
+ "name": "volume2",
+ "uuid": "028baa66-41bd-11e9-81d5-00a0986138f3"
+ },
+ },
+ "name": "/vol/volume1/qtree1/lun2",
+ "space": {
+ "scsi_thin_provisioning_support_enabled": True,
+ "guarantee": {
+ "requested": True,
+ },
+ "size": 1073741824
+ },
+ "lun_maps": [
+ {
+ "igroup": {
+ "name": "igroup2",
+ "uuid": "4ea7a442-86d1-11e0-ae1c-123478563413"
+ },
+ "logical_unit_number": 0,
+ }
+ ],
+ "comment": "string",
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f3"
+ },
+ }
+ ],
+ }, None),
+ 'error_same_size': (400, None, 'New LUN size is the same as the old LUN size - this may happen ...')
+})
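+# 'error_same_size' mimics the benign resize error that resize_lun_rest ignores (see test_error_resize_lun below)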
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': '/vol/volume1/qtree1/lun1',
+ 'flexvol_name': 'volume1',
+ 'vserver': 'svm1',
+ 'use_rest': 'always',
+}
+
+DEFAULT_ARGS_NO_VOL = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': '/vol/volume1/qtree1/lun1',
+ 'vserver': 'svm1',
+ 'use_rest': 'always',
+}
+
+DEFAULT_ARGS_MIN = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'svm1',
+ 'use_rest': 'always',
+}
+
+
+def test_get_lun_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert my_obj.get_luns_rest() is None
+
+
+def test_get_lun_one():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['one_lun'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ get_results = my_obj.get_luns_rest()
+ assert len(get_results) == 1
+ assert get_results[0]['name'] == '/vol/volume1/qtree1/lun1'
+
+
+def test_get_lun_one_no_path():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['one_lun'])
+ ])
+ module_args = {
+ 'name': 'lun1',
+ 'flexvol_name': 'volume1',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS_MIN, module_args)
+ get_results = my_obj.get_luns_rest()
+ assert len(get_results) == 1
+ assert get_results[0]['name'] == '/vol/volume1/qtree1/lun1'
+
+
+def test_get_lun_more():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['two_luns'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ get_results = my_obj.get_luns_rest()
+ assert len(get_results) == 2
+ assert get_results[0]['name'] == '/vol/volume1/qtree1/lun1'
+ assert get_results[1]['name'] == '/vol/volume1/qtree1/lun2'
+
+
+def test_error_get_lun_with_flexvol():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.get_luns_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "Error getting LUN's for flexvol volume1: calling: storage/luns: got Expected error." == error
+
+
+def test_error_get_lun_with_lun_path():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['lun_path'] = '/vol/volume1/qtree1/lun1'
+ my_obj.parameters.pop('flexvol_name')
+
+ error = expect_and_capture_ansible_exception(my_obj.get_luns_rest, 'fail', '/vol/volume1/qtree1/lun1')['msg']
+ print('Info: %s' % error)
+ assert "Error getting lun_path /vol/volume1/qtree1/lun1: calling: storage/luns: got Expected error." == error
+
+
+def test_successfully_create_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('POST', 'storage/luns', SRR['one_lun']),
+ ])
+ module_args = {
+ 'size': 1073741824,
+ 'size_unit': 'bytes',
+ 'os_type': 'linux',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_create_lun_without_path():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('POST', 'storage/luns', SRR['one_lun']),
+ ])
+ module_args = {
+ 'size': 1073741824,
+ 'size_unit': 'bytes',
+ 'os_type': 'linux',
+ 'flexvol_name': 'volume1',
+ 'name': 'lun'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_create_lun_missing_os_type():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['size'] = 1073741824
+ my_obj.parameters['size_unit'] = 'bytes'
+ error = expect_and_capture_ansible_exception(my_obj.apply, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "The os_type parameter is required for creating a LUN with REST." == error
+
+
+def test_error_create_lun_missing_size():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['os_type'] = 'linux'
+ error = expect_and_capture_ansible_exception(my_obj.apply, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "size is a required parameter for create." == error
+
+
+def test_error_create_lun_missing_name():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ # Not sure why test_error_create_lun_missing_os_type requires this, but this test doesn't. They should follow the
+ # same path (unless we don't do a get when flexvol_name isn't set)
+ # ('GET', 'storage/luns', SRR['empty_records']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters.pop('flexvol_name')
+ my_obj.parameters['os_type'] = 'linux'
+ my_obj.parameters['size'] = 1073741824
+ my_obj.parameters['size_unit'] = 'bytes'
+ error = expect_and_capture_ansible_exception(my_obj.apply, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "The flexvol_name parameter is required for creating a LUN." == error
+
+
+def test_successfully_create_lun_all_options():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('POST', 'storage/luns', SRR['one_lun']),
+ ])
+ module_args = {
+ 'size': '1073741824',
+ 'os_type': 'linux',
+ 'space_reserve': True,
+ 'space_allocation': True,
+ 'comment': 'carchi8py was here',
+ 'qos_policy_group': 'qos_policy_group_1',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_create_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'storage/luns', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['size'] = 1073741824
+ my_obj.parameters['size_unit'] = 'bytes'
+ my_obj.parameters['os_type'] = 'linux'
+
+ error = expect_and_capture_ansible_exception(my_obj.create_lun_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "Error creating LUN /vol/volume1/qtree1/lun1: calling: storage/luns: got Expected error." == error
+
+
+def test_successfully_delete_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['one_lun']),
+ ('DELETE', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_records']),
+ ])
+ module_args = {
+ 'size': 1073741824,
+ 'size_unit': 'bytes',
+ 'os_type': 'linux',
+ 'state': 'absent',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_delete_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('DELETE', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['size'] = 1073741824
+ my_obj.parameters['size_unit'] = 'bytes'
+ my_obj.parameters['os_type'] = 'linux'
+ my_obj.parameters['state'] = 'absent'
+ my_obj.uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_lun_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "Error deleting LUN /vol/volume1/qtree1/lun1: calling: storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412: got Expected error." == error
+
+
+def test_error_delete_lun_missing_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['size'] = 1073741824
+ my_obj.parameters['size_unit'] = 'bytes'
+ my_obj.parameters['os_type'] = 'linux'
+ my_obj.parameters['state'] = 'absent'
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_lun_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "Error deleting LUN /vol/volume1/qtree1/lun1: UUID not found" == error
+
+
+def test_successfully_rename_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['empty_records']),
+ ('GET', 'storage/luns', SRR['one_lun']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_records']),
+ ])
+ module_args = {
+ 'name': '/vol/volume1/qtree12/lun1',
+ 'from_name': '/vol/volume1/qtree1/lun1',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_rename_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['name'] = '/vol/volume1/qtree12/lun1'
+ my_obj.parameters['from_name'] = '/vol/volume1/qtree1/lun1'
+ my_obj.uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ error = expect_and_capture_ansible_exception(my_obj.rename_lun_rest, 'fail', '/vol/volume1/qtree12/lun1')['msg']
+ print('Info: %s' % error)
+ assert "Error renaming LUN /vol/volume1/qtree12/lun1: calling: storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412: got Expected error." == error
+
+
+def test_error_rename_lun_missing_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['name'] = '/vol/volume1/qtree12/lun1'
+ my_obj.parameters['from_name'] = '/vol/volume1/qtree1/lun1'
+ error = expect_and_capture_ansible_exception(my_obj.rename_lun_rest, 'fail', '/vol/volume1/qtree12/lun1')['msg']
+ print('Info: %s' % error)
+ assert "Error renaming LUN /vol/volume1/qtree12/lun1: UUID not found" == error
+
+
+def test_successfully_resize_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['one_lun']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_records']),
+ ])
+ module_args = {
+ 'size': 2147483648,
+ 'size_unit': 'bytes',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_resize_lun():
+ ''' assert that
+ resize fails on error, except for a same-size issue caused by rounding errors
+ resize correctly returns True/False to indicate whether the size was changed
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['generic_error']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['error_same_size']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success'])
+ ])
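+ # three PATCH outcomes: a real error (fails), the benign same-size error (returns False), then a success (returns True)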
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['size'] = 2147483648
+ my_obj.parameters['size_unit'] = 'bytes'
+ my_obj.uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ error = expect_and_capture_ansible_exception(my_obj.resize_lun_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "Error resizing LUN /vol/volume1/qtree1/lun1: calling: storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412: got Expected error." == error
+ assert not my_obj.resize_lun_rest()
+ assert my_obj.resize_lun_rest()
+
+
+def test_error_resize_lun_missing_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['size'] = 2147483648
+ my_obj.parameters['size_unit'] = 'bytes'
+ error = expect_and_capture_ansible_exception(my_obj.resize_lun_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert "Error resizing LUN /vol/volume1/qtree1/lun1: UUID not found" == error
+
+
+def test_successfully_modify_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/luns', SRR['one_lun']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_records']),
+ ])
+ module_args = {
+ 'comment': 'carchi8py was here',
+ 'qos_policy_group': 'qos_policy_group_12',
+ 'space_reserve': False,
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_modify_lun_9_10():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/luns', SRR['one_lun']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_records']),
+ ])
+ module_args = {
+ 'comment': 'carchi8py was here',
+ 'qos_policy_group': 'qos_policy_group_12',
+ 'space_allocation': False,
+ 'space_reserve': False,
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_lun():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'carchi8py was here'
+ my_obj.parameters['qos_policy_group'] = 'qos_policy_group_12'
+ my_obj.parameters['space_allocation'] = False
+ my_obj.parameters['space_reserve'] = False
+ my_obj.uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ modify = {'comment': 'carchi8py was here', 'qos_policy_group': 'qos_policy_group_12', 'space_reserve': False, 'space_allocation': False}
+ error = expect_and_capture_ansible_exception(my_obj.modify_lun_rest, 'fail', modify)['msg']
+ print('Info: %s' % error)
+ assert "Error modifying LUN /vol/volume1/qtree1/lun1: calling: storage/luns/1cd8a442-86d1-11e0-ae1c-123478563412: got Expected error." == error
+
+
+def test_error_modify_lun_missing_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'carchi8py was here'
+ my_obj.parameters['qos_policy_group'] = 'qos_policy_group_12'
+ my_obj.parameters['space_allocation'] = False
+ my_obj.parameters['space_reserve'] = False
+ modify = {'comment': 'carchi8py was here', 'qos_policy_group': 'qos_policy_group_12', 'space_reserve': False, 'space_allocation': False}
+ error = expect_and_capture_ansible_exception(my_obj.modify_lun_rest, 'fail', modify)['msg']
+ print('Info: %s' % error)
+ assert "Error modifying LUN /vol/volume1/qtree1/lun1: UUID not found" == error
+
+
+def test_error_modify_lun_extra_option():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'carchi8py was here'
+ my_obj.parameters['qos_policy_group'] = 'qos_policy_group_12'
+ my_obj.parameters['space_allocation'] = False
+ my_obj.parameters['space_reserve'] = False
+ my_obj.uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ modify = {'comment': 'carchi8py was here', 'qos_policy_group': 'qos_policy_group_12', 'space_reserve': False, 'space_allocation': False, 'fake': 'fake'}
+ error = expect_and_capture_ansible_exception(my_obj.modify_lun_rest, 'fail', modify)['msg']
+ print('Info: %s' % error)
+ assert "Error modifying LUN /vol/volume1/qtree1/lun1: Unknown parameters: {'fake': 'fake'}" == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_mcc_mediator.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_mcc_mediator.py
new file mode 100644
index 000000000..0259edf03
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_mcc_mediator.py
@@ -0,0 +1,124 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_mcc_mediator '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_mcc_mediator \
+ import NetAppOntapMccipMediator as mediator_module # module under test
+
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_mediator_with_no_results': (200, {'num_records': 0}, None),
+ 'get_mediator_with_results': (200, {
+ 'num_records': 1,
+ 'records': [{
+ 'ip_address': '10.10.10.10',
+ 'uuid': 'ebe27c49-1adf-4496-8335-ab862aebebf2'
+ }]
+ }, None)
+}
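+# these canned tuples are returned in order by the patched send_request via side_effect; 'end_of_sequence' flags any unexpected extra call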
+
+
+class TestMyModule(unittest.TestCase):
+ """ Unit tests for na_ontap_metrocluster """
+
+ def setUp(self):
+ self.mock_mediator = {
+ 'mediator_address': '10.10.10.10',
+ 'mediator_user': 'carchi',
+ 'mediator_password': 'netapp1!'
+ }
+
+ def mock_args(self):
+ return {
+ 'mediator_address': self.mock_mediator['mediator_address'],
+ 'mediator_user': self.mock_mediator['mediator_user'],
+ 'mediator_password': self.mock_mediator['mediator_password'],
+ 'hostname': 'test_host',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_alias_mock_object(self):
+ alias_obj = mediator_module()
+ return alias_obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_create(self, mock_request):
+ """Test successful rest create"""
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_mediator_with_no_results'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_create_idempotency(self, mock_request):
+ """Test successful rest create"""
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_mediator_with_results'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_delete(self, mock_request):
+ """Test successful rest create"""
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_mediator_with_results'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_delete_idempotency(self, mock_request):
+ """Test rest delete idempotency"""
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_mediator_with_no_results'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_metrocluster.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_metrocluster.py
new file mode 100644
index 000000000..5ccc3eb95
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_metrocluster.py
@@ -0,0 +1,117 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_metrocluster '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_metrocluster \
+ import NetAppONTAPMetroCluster as metrocluster_module # module under test
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_metrocluster_with_results': (200, {"local": {
+ "cluster": {
+ 'name': 'cluster1'
+ },
+ "configuration_state": "configuration_error", # TODO: put correct state
+ "partner_cluster_reachable": "true",
+ }}, None),
+ 'get_metrocluster_with_no_results': (200, None, None),
+ 'metrocluster_post': (200, {'job': {
+ 'uuid': 'fde79888-692a-11ea-80c2-005056b39fe7',
+ '_links': {
+ 'self': {
+ 'href': '/api/cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7'}}}
+ }, None),
+ 'job': (200, {
+ "uuid": "cca3d070-58c6-11ea-8c0c-005056826c14",
+ "description": "POST /api/cluster/metrocluster",
+ "state": "success",
+ "message": "There are not enough disks in Pool1.",
+ "code": 2432836,
+ "start_time": "2020-02-26T10:35:44-08:00",
+ "end_time": "2020-02-26T10:47:38-08:00",
+ "_links": {
+ "self": {
+ "href": "/api/cluster/jobs/cca3d070-58c6-11ea-8c0c-005056826c14"
+ }
+ }
+ }, None)
+}
+
+
+class TestMyModule(unittest.TestCase):
+ """ Unit tests for na_ontap_metrocluster """
+
+ def setUp(self):
+ self.mock_metrocluster = {
+ 'partner_cluster_name': 'cluster1',
+ 'node_name': 'carchi_vsim1',
+ 'partner_node_name': 'carchi_vsim3'
+ }
+
+ def mock_args(self):
+ return {
+ 'dr_pairs': [{
+ 'node_name': self.mock_metrocluster['node_name'],
+ 'partner_node_name': self.mock_metrocluster['partner_node_name'],
+ }],
+ 'partner_cluster_name': self.mock_metrocluster['partner_cluster_name'],
+ 'hostname': 'test_host',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_alias_mock_object(self):
+ alias_obj = metrocluster_module()
+ return alias_obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_create(self, mock_request):
+ """Test successful rest create"""
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_metrocluster_with_no_results'],
+ SRR['metrocluster_post'],
+ SRR['job'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_create_idempotency(self, mock_request):
+ """Test rest create idempotency"""
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_metrocluster_with_results'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_metrocluster_dr_group.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_metrocluster_dr_group.py
new file mode 100644
index 000000000..2bcc558aa
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_metrocluster_dr_group.py
@@ -0,0 +1,164 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_metrocluster_dr_group '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_metrocluster_dr_group \
+ import NetAppONTAPMetroClusterDRGroup as mcc_dr_pairs_module # module under test
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_mcc_dr_pair_with_no_results': (200, {'records': [], 'num_records': 0}, None),
+ 'get_mcc_dr_pair_with_results': (200, {'records': [{'partner_cluster': {'name': 'rha2-b2b1_siteB'},
+ 'dr_pairs': [{'node': {'name': 'rha17-a2'},
+ 'partner': {'name': 'rha17-b2'}},
+ {'node': {'name': 'rha17-b2'},
+ 'partner': {'name': 'rha17-b1'}}],
+ 'id': '2'}],
+ 'num_records': 1}, None),
+ 'mcc_dr_pair_post': (200, {'job': {
+ 'uuid': 'fde79888-692a-11ea-80c2-005056b39fe7',
+ '_links': {
+ 'self': {
+ 'href': '/api/cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7'}}}
+ }, None),
+ 'get_mcc_dr_node': (200, {'records': [{'dr_group_id': '1'}], 'num_records': 1}, None),
+ 'get_mcc_dr_node_none': (200, {'records': [], 'num_records': 0}, None),
+ 'job': (200, {
+ "uuid": "cca3d070-58c6-11ea-8c0c-005056826c14",
+ "description": "POST /api/cluster/metrocluster",
+ "state": "success",
+ "message": "There are not enough disks in Pool1.",
+ "code": 2432836,
+ "start_time": "2020-02-26T10:35:44-08:00",
+ "end_time": "2020-02-26T10:47:38-08:00",
+ "_links": {
+ "self": {
+ "href": "/api/cluster/jobs/cca3d070-58c6-11ea-8c0c-005056826c14"
+ }
+ }
+ }, None)
+}
+
+
+class TestMyModule(unittest.TestCase):
+ """ Unit tests for na_ontap_metrocluster """
+
+ def setUp(self):
+ self.mock_mcc_dr_pair = {
+ 'partner_cluster_name': 'rha2-b2b1_siteB',
+ 'node_name': 'rha17-a2',
+ 'partner_node_name': 'rha17-b2',
+ 'node_name2': 'rha17-b2',
+ 'partner_node_name2': 'rha17-b1'
+
+ }
+
+ def mock_args(self):
+ return {
+ 'dr_pairs': [{
+ 'node_name': self.mock_mcc_dr_pair['node_name'],
+ 'partner_node_name': self.mock_mcc_dr_pair['partner_node_name'],
+ }, {
+ 'node_name': self.mock_mcc_dr_pair['node_name2'],
+ 'partner_node_name': self.mock_mcc_dr_pair['partner_node_name2'],
+ }],
+ 'partner_cluster_name': self.mock_mcc_dr_pair['partner_cluster_name'],
+ 'hostname': 'test_host',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_alias_mock_object(self):
+ alias_obj = mcc_dr_pairs_module()
+ return alias_obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_create(self, mock_request):
+ """Test successful rest create"""
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_mcc_dr_pair_with_no_results'],
+ SRR['get_mcc_dr_pair_with_no_results'],
+ SRR['mcc_dr_pair_post'],
+ SRR['job'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_create_idempotency(self, mock_request):
+ """Test rest create idempotency"""
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_mcc_dr_pair_with_results'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_delete(self, mock_request):
+ """Test successful rest delete"""
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_mcc_dr_pair_with_results'],
+ SRR['mcc_dr_pair_post'],
+ SRR['job'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_delete_idempotency(self, mock_request):
+ """Test rest delete idempotency"""
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_mcc_dr_pair_with_no_results'],
+ SRR['get_mcc_dr_pair_with_no_results'],
+ SRR['get_mcc_dr_node_none'],
+ SRR['get_mcc_dr_node_none'],
+ SRR['job'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_motd.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_motd.py
new file mode 100644
index 000000000..64626e5ec
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_motd.py
@@ -0,0 +1,164 @@
+# (c) 2019-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_motd """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_warning_was_raised, expect_and_capture_ansible_exception, call_main, create_module, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_motd import NetAppONTAPMotd as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def motd_info(msg):
+ return {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'vserver-motd-info': {
+ 'message': msg,
+ 'vserver': 'ansible',
+ 'is-cluster-message-enabled': 'true'}}
+ }
+
+
+ZRR = zapi_responses({
+ 'motd_info': build_zapi_response(motd_info('motd_message')),
+ 'motd_none': build_zapi_response(motd_info('None')),
+})
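+# 'motd_none' carries the literal message 'None', which the delete idempotency check below expects to be treated as no message set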
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'use_rest',
+ 'motd_message': 'motd_message',
+ 'vserver': 'ansible',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ }
+ print('Info: %s' % call_main(my_main, module_args, fail=True)['msg'])
+
+
+def test_ensure_motd_get_called():
+ ''' fetching details of motd '''
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.motd_get() is None
+
+
+def test_ensure_get_called_existing():
+ ''' test for existing motd'''
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.motd_get()
+
+
+def test_motd_create():
+ ''' test for creating motd'''
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ # modify
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['message'] = 'new_message'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_motd_delete():
+ ''' test for deleting motd'''
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_info']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['motd_info']),
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['motd_none']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['error']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert expect_and_capture_ansible_exception(my_obj.motd_get, 'fail')['msg'] == zapi_error_message('Error fetching motd info')
+ assert expect_and_capture_ansible_exception(my_obj.modify_motd, 'fail')['msg'] == zapi_error_message('Error creating motd')
+
+
+def test_rest_required():
+ module_args = {
+ 'use_rest': 'always',
+ }
+ error_msg = 'netapp.ontap.na_ontap_motd is deprecated and only supports ZAPI. Please use netapp.ontap.na_ontap_login_messages.'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == 'Error: %s' % error_msg
+ register_responses([
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['success']),
+ ('ZAPI', 'vserver-motd-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-motd-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'auto',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_warning_was_raised('Falling back to ZAPI: %s' % error_msg)
+ module_args = {
+ 'use_rest': 'NevEr',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_warning_was_raised(error_msg)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_name_mappings.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_name_mappings.py
new file mode 100644
index 000000000..5294a9537
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_name_mappings.py
@@ -0,0 +1,282 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_name_mappings """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_name_mappings \
+ import NetAppOntapNameMappings as my_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'mapping_record': (
+ 200,
+ {
+ "records": [
+ {
+ "client_match": "10.254.101.111/28",
+ "direction": "win_unix",
+ "index": 1,
+ "pattern": "ENGCIFS_AD_USER",
+ "replacement": "unix_user1",
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ 'mapping_record1': (
+ 200,
+ {
+ "records": [
+ {
+ "direction": "win_unix",
+ "index": 2,
+ "pattern": "ENGCIFS_AD_USERS",
+ "replacement": "unix_user",
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
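+# 'mapping_record' holds the mapping at index 1 (with client_match); 'mapping_record1' holds index 2 without it
+# and is used by the replacement-only modify and the index swap tests below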
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'vserver': 'svm1',
+ 'direction': 'win_unix',
+ 'index': '1'
+}
+
+
+def test_get_name_mappings_rest():
+ ''' Test retrieving name mapping record '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record']),
+ ])
+ name_obj = create_module(my_module, DEFAULT_ARGS)
+ result = name_obj.get_name_mappings_rest()
+ assert result
+
+
+def test_error_get_name_mappings_rest():
+ ''' Test error retrieving name mapping record '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, DEFAULT_ARGS, fail=True)['msg']
+ msg = "calling: name-services/name-mappings: got Expected error."
+ assert msg in error
+
+
+def test_error_direction_s3_choices():
+ ''' Test error when set s3 choices in older version '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ error = create_module(my_module, DEFAULT_ARGS, {'direction': 's3_unix'}, fail=True)['msg']
+ msg = "Error: direction s3_unix requires ONTAP 9.12.1 or later"
+ assert msg in error
+
+
+def test_create_name_mappings_rest():
+ ''' Test create name mapping record '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['empty_records']),
+ ('POST', 'name-services/name-mappings', SRR['empty_good']),
+ ])
+ module_args = {
+ "pattern": "ENGCIFS_AD_USER",
+ "replacement": "unix_user1",
+ "client_match": "10.254.101.111/28",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_create_name_mappings_rest():
+ ''' Test error create name mapping record '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['empty_records']),
+ ('POST', 'name-services/name-mappings', SRR['generic_error']),
+ ])
+ module_args = {
+ "pattern": "ENGCIFS_AD_USER",
+ "replacement": "unix_user1",
+ "client_match": "10.254.101.111/28",
+ }
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error on creating name mappings rest:"
+ assert msg in error
+
+
+def test_delete_name_mappings_rest():
+ ''' Test delete name mapping record '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record']),
+ ('DELETE', 'name-services/name-mappings/02c9e252-41be-11e9-81d5-00a0986138f7/win_unix/1', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_name_mappings_rest_error():
+ ''' Test error delete name mapping record '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record']),
+ ('DELETE', 'name-services/name-mappings/02c9e252-41be-11e9-81d5-00a0986138f7/win_unix/1', SRR['generic_error']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error on deleting name mappings rest:"
+ assert msg in error
+
+
+def test_create_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record'])
+ ])
+ module_args = {
+ "pattern": "ENGCIFS_AD_USER",
+ "replacement": "unix_user1",
+ "client_match": "10.254.101.111/28",
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['empty_records'])
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_name_mappings_pattern_rest():
+ ''' Test modify name mapping pattern '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record']),
+ ('PATCH', 'name-services/name-mappings/02c9e252-41be-11e9-81d5-00a0986138f7/win_unix/1', SRR['empty_good']),
+ ])
+ module_args = {
+ "pattern": "ENGCIFS_AD_USERS",
+ "replacement": "unix_user2",
+ "client_match": "10.254.101.112/28",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_name_mappings_replacement_rest():
+ ''' Test modify name mapping replacement '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record1']),
+ ('PATCH', 'name-services/name-mappings/02c9e252-41be-11e9-81d5-00a0986138f7/win_unix/1', SRR['empty_good']),
+ ])
+ module_args = {
+ "replacement": "unix_user2"
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_name_mappings_client_match_rest():
+ ''' Test modify name mapping client match '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record']),
+ ('PATCH', 'name-services/name-mappings/02c9e252-41be-11e9-81d5-00a0986138f7/win_unix/1', SRR['empty_good']),
+ ])
+ module_args = {
+ "client_match": "10.254.101.112/28",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_name_mappings_rest():
+ ''' Test error modify name mapping '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record']),
+ ('PATCH', 'name-services/name-mappings/02c9e252-41be-11e9-81d5-00a0986138f7/win_unix/1', SRR['generic_error']),
+ ])
+ module_args = {
+ "pattern": "ENGCIFS_AD_USERS",
+ "replacement": "unix_user2",
+ "client_match": "10.254.101.112/28",
+ }
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error on modifying name mappings rest:"
+ assert msg in error
+
+
+def test_swap_name_mappings_new_index_rest():
+ ''' Test swap name mapping positions '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/name-mappings', SRR['empty_records']),
+ ('GET', 'name-services/name-mappings', SRR['mapping_record1']),
+ ('PATCH', 'name-services/name-mappings/02c9e252-41be-11e9-81d5-00a0986138f7/win_unix/2', SRR['empty_good']),
+ ])
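+    # the first GET (index 1) returns nothing and the second GET finds the record at from_index 2,
+    # so the existing mapping is re-indexed with a PATCH on index 2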
+ module_args = {
+ "index": "1",
+ "from_index": "2"
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_parameters_for_create_name_mappings_rest():
+ ''' Validate parameters for create name mapping record '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'name-services/name-mappings', SRR['empty_records']),
+ ])
+ module_args = {
+ "client_match": "10.254.101.111/28",
+ }
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error creating name mappings for an SVM, pattern and replacement are required in create."
+ assert msg in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_name_service_switch.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_name_service_switch.py
new file mode 100644
index 000000000..3b91e9be7
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_name_service_switch.py
@@ -0,0 +1,181 @@
+# (c) 2019-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_name_service_switch '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import create_module,\
+ patch_ansible, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_name_service_switch \
+ import NetAppONTAPNsswitch as nss_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'https': 'True',
+ 'use_rest': 'never',
+ 'state': 'present',
+ 'vserver': 'test_vserver',
+ 'database_type': 'namemap',
+ 'sources': 'files,ldap',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+}
+
+
+nss_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'namservice-nsswitch-config-info': {
+ 'nameservice-database': 'namemap',
+ 'nameservice-sources': {'nss-source-type': 'files,ldap'}
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'nss_info': build_zapi_response(nss_info)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "vserver", "database_type"]
+ error = create_module(nss_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_get_nonexistent_nss():
+ register_responses([
+ ('nameservice-nsswitch-get-iter', ZRR['no_records'])
+ ])
+ nss_obj = create_module(nss_module, DEFAULT_ARGS)
+ assert nss_obj.get_name_service_switch() is None
+
+
+def test_get_existing_nss():
+ register_responses([
+ ('nameservice-nsswitch-get-iter', ZRR['nss_info'])
+ ])
+ nss_obj = create_module(nss_module, DEFAULT_ARGS)
+ assert nss_obj.get_name_service_switch()
+
+
+def test_successfully_create():
+ register_responses([
+ ('nameservice-nsswitch-get-iter', ZRR['no_records']),
+ ('nameservice-nsswitch-create', ZRR['success'])
+ ])
+ assert create_and_apply(nss_module, DEFAULT_ARGS)['changed']
+
+
+def test_successfully_modify():
+ register_responses([
+ ('nameservice-nsswitch-get-iter', ZRR['nss_info']),
+ ('nameservice-nsswitch-modify', ZRR['success'])
+ ])
+ assert create_and_apply(nss_module, DEFAULT_ARGS, {'sources': 'files'})['changed']
+
+
+def test_successfully_delete():
+ register_responses([
+ ('nameservice-nsswitch-get-iter', ZRR['nss_info']),
+ ('nameservice-nsswitch-destroy', ZRR['success'])
+ ])
+ assert create_and_apply(nss_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_if_all_methods_catch_exception_zapi():
+ ''' test error zapi - get/create/modify/delete'''
+ register_responses([
+ ('nameservice-nsswitch-get-iter', ZRR['error']),
+ ('nameservice-nsswitch-create', ZRR['error']),
+ ('nameservice-nsswitch-modify', ZRR['error']),
+ ('nameservice-nsswitch-destroy', ZRR['error'])
+ ])
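+    # the four error responses are consumed in order by the get/create/modify/delete calls below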
+ nss_obj = create_module(nss_module, DEFAULT_ARGS)
+
+ assert 'Error fetching name service switch' in expect_and_capture_ansible_exception(nss_obj.get_name_service_switch, 'fail')['msg']
+ assert 'Error on creating name service switch' in expect_and_capture_ansible_exception(nss_obj.create_name_service_switch, 'fail')['msg']
+ assert 'Error on modifying name service switch' in expect_and_capture_ansible_exception(nss_obj.modify_name_service_switch, 'fail', {})['msg']
+ assert 'Error on deleting name service switch' in expect_and_capture_ansible_exception(nss_obj.delete_name_service_switch, 'fail')['msg']
+
+
+SRR = rest_responses({
+ 'nss_info': (200, {"records": [
+ {
+ 'nsswitch': {
+ 'group': ['files'],
+ 'hosts': ['files', 'dns'],
+ 'namemap': ['files'],
+ 'netgroup': ['files'],
+ 'passwd': ['files']
+ },
+ 'uuid': '6647fa13'}
+ ], 'num_records': 1}, None),
+ 'nss_info_no_record': (200, {"records": [
+ {'uuid': '6647fa13'}
+ ], 'num_records': 1}, None),
+ 'svm_uuid': (200, {"records": [
+ {'uuid': '6647fa13'}
+ ], "num_records": 1}, None)
+})
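+# 'nss_info_no_record' and 'svm_uuid' both return only the SVM uuid; the former simulates an SVM without an
+# nsswitch section, so any requested sources show up as a modification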
+
+
+def test_successfully_modify_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['nss_info_no_record']),
+ ('PATCH', 'svm/svms/6647fa13', SRR['success']),
+ ])
+ args = {'sources': 'files', 'use_rest': 'always'}
+ assert create_and_apply(nss_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_error_get_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['zero_records'])
+ ])
+ error = "Error: Specified vserver test_vserver not found"
+ assert error in create_and_apply(nss_module, DEFAULT_ARGS, {'use_rest': 'always'}, fail=True)['msg']
+
+
+def test_error_delete_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['nss_info'])
+ ])
+ args = {'state': 'absent', 'use_rest': 'always'}
+ error = "Error: deleting name service switch not supported in REST."
+ assert error in create_and_apply(nss_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_if_all_methods_catch_exception_rest():
+ ''' test error rest - get/modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ('PATCH', 'svm/svms/6647fa13', SRR['generic_error']),
+ ])
+ nss_obj = create_module(nss_module, DEFAULT_ARGS, {'use_rest': 'always'})
+ nss_obj.svm_uuid = '6647fa13'
+ assert 'Error fetching name service switch' in expect_and_capture_ansible_exception(nss_obj.get_name_service_switch, 'fail')['msg']
+ assert 'Error on modifying name service switch' in expect_and_capture_ansible_exception(nss_obj.modify_name_service_switch_rest, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ndmp.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ndmp.py
new file mode 100644
index 000000000..78278bc7b
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ndmp.py
@@ -0,0 +1,196 @@
+# (c) 2019, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_ndmp '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ndmp \
+ import NetAppONTAPNdmp as ndmp_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'get_uuid': (200, {'records': [{'uuid': 'testuuid'}]}, None),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, 'Error fetching ndmp from ansible: NetApp API failed. Reason - Unexpected error:',
+ "REST API currently does not support 'backup_log_enable, ignore_ctime_enabled'"),
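+    # note: 'generic_error' above is a 4-tuple; its fourth element is asserted directly in test_rest_error below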
+ 'get_ndmp_uuid': (200, {"records": [{"svm": {"name": "svm1", "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"}}]}, None),
+ 'get_ndmp': (200, {"enabled": True, "authentication_types": ["test"],
+ "records": [{"svm": {"name": "svm1", "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"}}]}, None)
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.type = kind
+ self.data = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'ndmp':
+ xml = self.build_ndmp_info(self.data)
+ if self.type == 'error':
+ error = netapp_utils.zapi.NaApiError('test', 'error')
+ raise error
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_ndmp_info(ndmp_details):
+ ''' build xml data for ndmp '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'ndmp-vserver-attributes-info': {
+ 'ignore_ctime_enabled': ndmp_details['ignore_ctime_enabled'],
+ 'backup_log_enable': ndmp_details['backup_log_enable'],
+
+ 'authtype': [
+ {'ndmpd-authtypes': 'plaintext'},
+ {'ndmpd-authtypes': 'challenge'}
+ ]
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.mock_ndmp = {
+ 'ignore_ctime_enabled': True,
+ 'backup_log_enable': 'false',
+ 'authtype': 'plaintext',
+ 'enable': True
+ }
+
+ def mock_args(self, rest=False):
+ if rest:
+ return {
+ 'authtype': self.mock_ndmp['authtype'],
+ 'enable': True,
+ 'vserver': 'ansible',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'https': 'False'
+ }
+ else:
+ return {
+ 'vserver': 'ansible',
+ 'authtype': self.mock_ndmp['authtype'],
+ 'ignore_ctime_enabled': self.mock_ndmp['ignore_ctime_enabled'],
+ 'backup_log_enable': self.mock_ndmp['backup_log_enable'],
+ 'enable': self.mock_ndmp['enable'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+ }
+
+ def get_ndmp_mock_object(self, kind=None, cx_type='zapi'):
+ """
+ Helper method to return an na_ontap_ndmp object
+ :param kind: passes this param to MockONTAPConnection()
+ :return: na_ontap_ndmp object
+ """
+ obj = ndmp_module()
+ if cx_type == 'zapi':
+ obj.asup_log_for_cserver = Mock(return_value=None)
+ obj.server = Mock()
+ obj.server.invoke_successfully = Mock()
+ if kind is None:
+ obj.server = MockONTAPConnection()
+ else:
+ obj.server = MockONTAPConnection(kind=kind, data=self.mock_ndmp)
+ return obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ndmp.NetAppONTAPNdmp.ndmp_get_iter')
+    def test_successful_modify(self, get_ndmp):
+ ''' Test successful modify ndmp'''
+ data = self.mock_args()
+ set_module_args(data)
+ current = {
+ 'ignore_ctime_enabled': False,
+ 'backup_log_enable': True
+ }
+        get_ndmp.side_effect = [
+ current
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_ndmp_mock_object('ndmp').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ndmp.NetAppONTAPNdmp.ndmp_get_iter')
+    def test_modify_error(self, get_ndmp):
+ ''' Test modify error '''
+ data = self.mock_args()
+ set_module_args(data)
+ current = {
+ 'ignore_ctime_enabled': False,
+ 'backup_log_enable': True
+ }
+        get_ndmp.side_effect = [
+ current
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_ndmp_mock_object('error').apply()
+ assert exc.value.args[0]['msg'] == 'Error modifying ndmp on ansible: NetApp API failed. Reason - test:error'
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.mock_args()
+ data['use_rest'] = 'Always'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_ndmp_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['msg'] == SRR['generic_error'][3]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successfully_modify(self, mock_request):
+ data = self.mock_args(rest=True)
+ data['use_rest'] = 'Always'
+ set_module_args(data)
+ mock_request.side_effect = [
+            SRR['is_rest'],     # get version (now called even when use_rest is 'Always')
+ SRR['get_ndmp_uuid'], # for get svm uuid: protocols/ndmp/svms
+ SRR['get_ndmp'], # for get ndmp details: '/protocols/ndmp/svms/' + uuid
+ SRR['get_ndmp_uuid'], # for get svm uuid: protocols/ndmp/svms (before modify)
+ SRR['empty_good'], # modify (patch)
+ SRR['end_of_sequence'],
+ ]
+ my_obj = ndmp_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_ifgrp.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_ifgrp.py
new file mode 100644
index 000000000..7e3e58783
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_ifgrp.py
@@ -0,0 +1,737 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_net_ifgrp '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings, set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp \
+ import NetAppOntapIfGrp as ifgrp_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+    pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'ifgrp':
+ xml = self.build_ifgrp_info(self.params)
+ elif self.kind == 'ifgrp-ports':
+ xml = self.build_ifgrp_ports_info(self.params)
+ elif self.kind == 'ifgrp-fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_ifgrp_info(ifgrp_details):
+ ''' build xml data for ifgrp-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'net-port-info': {
+ 'port': ifgrp_details['name'],
+ 'ifgrp-distribution-function': 'mac',
+ 'ifgrp-mode': ifgrp_details['mode'],
+ 'node': ifgrp_details['node']
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+ @staticmethod
+ def build_ifgrp_ports_info(data):
+ ''' build xml data for ifgrp-ports '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'attributes': {
+ 'net-ifgrp-info': {
+ 'ports': [
+ {'lif-bindable': data['ports'][0]},
+ {'lif-bindable': data['ports'][1]},
+ {'lif-bindable': data['ports'][2]}
+ ]
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.mock_ifgrp = {
+ 'name': 'test',
+ 'port': 'a1',
+ 'node': 'test_vserver',
+ 'mode': 'something'
+ }
+
+ def mock_args(self):
+ return {
+ 'name': self.mock_ifgrp['name'],
+ 'distribution_function': 'mac',
+ 'ports': [self.mock_ifgrp['port']],
+ 'node': self.mock_ifgrp['node'],
+ 'mode': self.mock_ifgrp['mode'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'feature_flags': {'no_cserver_ems': True},
+ 'use_rest': 'never'
+ }
+
+ def get_ifgrp_mock_object(self, kind=None, data=None):
+ """
+ Helper method to return an na_ontap_net_ifgrp object
+ :param kind: passes this param to MockONTAPConnection()
+ :return: na_ontap_net_ifgrp object
+ """
+ obj = ifgrp_module()
+ obj.autosupport_log = Mock(return_value=None)
+ if data is None:
+ data = self.mock_ifgrp
+ obj.server = MockONTAPConnection(kind=kind, data=data)
+ return obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ ifgrp_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_get_nonexistent_ifgrp(self):
+ ''' Test if get_ifgrp returns None for non-existent ifgrp '''
+ set_module_args(self.mock_args())
+ result = self.get_ifgrp_mock_object().get_if_grp()
+ assert result is None
+
+ def test_get_existing_ifgrp(self):
+ ''' Test if get_ifgrp returns details for existing ifgrp '''
+ set_module_args(self.mock_args())
+ result = self.get_ifgrp_mock_object('ifgrp').get_if_grp()
+ assert result['name'] == self.mock_ifgrp['name']
+
+ def test_successful_create(self):
+ ''' Test successful create '''
+ data = self.mock_args()
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_ifgrp_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ def test_successful_delete(self):
+ ''' Test delete existing volume '''
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_ifgrp_mock_object('ifgrp').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_successful_modify(self):
+ ''' Test delete existing volume '''
+ data = self.mock_args()
+ data['ports'] = ['1', '2', '3']
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_ifgrp_mock_object('ifgrp').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.get_if_grp')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.create_if_grp')
+ def test_create_called(self, create_ifgrp, get_ifgrp):
+ data = self.mock_args()
+ set_module_args(data)
+ get_ifgrp.return_value = None
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_ifgrp_mock_object().apply()
+ get_ifgrp.assert_called_with()
+ create_ifgrp.assert_called_with()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.add_port_to_if_grp')
+ def test_if_ports_are_added_after_create(self, add_ports):
+ ''' Test successful create '''
+ data = self.mock_args()
+ set_module_args(data)
+ self.get_ifgrp_mock_object().create_if_grp()
+ add_ports.assert_called_with('a1')
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.get_if_grp')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.delete_if_grp')
+ def test_delete_called(self, delete_ifgrp, get_ifgrp):
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ get_ifgrp.return_value = Mock()
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_ifgrp_mock_object().apply()
+ get_ifgrp.assert_called_with()
+ delete_ifgrp.assert_called_with(None)
+
+ def test_get_return_value(self):
+ data = self.mock_args()
+ set_module_args(data)
+ result = self.get_ifgrp_mock_object('ifgrp').get_if_grp()
+ assert result['name'] == data['name']
+ assert result['mode'] == data['mode']
+ assert result['node'] == data['node']
+
+ def test_get_ports_list(self):
+ data = self.mock_args()
+ data['ports'] = ['e0a', 'e0b', 'e0c']
+ set_module_args(data)
+ result = self.get_ifgrp_mock_object('ifgrp-ports', data).get_if_grp_ports()
+ assert result['ports'] == data['ports']
+
+ def test_add_port_packet(self):
+ data = self.mock_args()
+ set_module_args(data)
+ obj = self.get_ifgrp_mock_object('ifgrp')
+ obj.add_port_to_if_grp('addme')
+ assert obj.server.xml_in['port'] == 'addme'
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.remove_port_to_if_grp')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.add_port_to_if_grp')
+ def test_modify_ports_calls_remove_existing_ports(self, add_port, remove_port):
+ ''' Test if already existing ports are not being added again '''
+ data = self.mock_args()
+ data['ports'] = ['1', '2']
+ set_module_args(data)
+ self.get_ifgrp_mock_object('ifgrp').modify_ports(current_ports=['1', '2', '3'])
+ assert remove_port.call_count == 1
+ assert add_port.call_count == 0
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.remove_port_to_if_grp')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_ifgrp.NetAppOntapIfGrp.add_port_to_if_grp')
+ def test_modify_ports_calls_add_new_ports(self, add_port, remove_port):
+ ''' Test new ports are added '''
+ data = self.mock_args()
+ data['ports'] = ['1', '2', '3', '4']
+ set_module_args(data)
+ self.get_ifgrp_mock_object('ifgrp').modify_ports(current_ports=['1', '2'])
+ assert remove_port.call_count == 0
+ assert add_port.call_count == 2
+
+ def test_get_ports_returns_none(self):
+ set_module_args(self.mock_args())
+ result = self.get_ifgrp_mock_object().get_if_grp_ports()
+ assert result['ports'] == []
+ result = self.get_ifgrp_mock_object().get_if_grp()
+ assert result is None
+
+ def test_if_all_methods_catch_exception(self):
+ set_module_args(self.mock_args())
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_ifgrp_mock_object('ifgrp-fail').get_if_grp()
+ assert 'Error getting if_group test' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_ifgrp_mock_object('ifgrp-fail').create_if_grp()
+ assert 'Error creating if_group test' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_ifgrp_mock_object('ifgrp-fail').get_if_grp_ports()
+ assert 'Error getting if_group ports test' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_ifgrp_mock_object('ifgrp-fail').add_port_to_if_grp('test-port')
+ assert 'Error adding port test-port to if_group test' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_ifgrp_mock_object('ifgrp-fail').remove_port_to_if_grp('test-port')
+ assert 'Error removing port test-port to if_group test' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_ifgrp_mock_object('ifgrp-fail').delete_if_grp()
+ assert 'Error deleting if_group test' in exc.value.args[0]['msg']
+
+
+def default_args():
+ args = {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_6': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_9_7': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
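+    # 'ifgrp_record' lists LAG a0b without member ports plus two LAGs named a0d with different member ports,
+    # which exercises the port-based matching logic of the module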
+ 'ifgrp_record': (200, {
+ "num_records": 2,
+ "records": [
+ {
+ 'lag': {
+ 'distribution_policy': 'ip',
+ 'mode': 'multimode_lacp'
+ },
+ 'name': 'a0b',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'type': 'lag',
+ 'uuid': '1b830a46-47cd-11ec-90df-005056b3dfc8'
+ },
+ {
+ 'broadcast_domain': {
+ 'ipspace': {'name': 'ip1'},
+ 'name': 'test1'
+ },
+ 'lag': {
+ 'distribution_policy': 'ip',
+ 'member_ports': [
+ {
+ 'name': 'e0d',
+ 'node': {'name': 'mohan9cluster2-01'},
+ }],
+ 'mode': 'multimode_lacp'},
+ 'name': 'a0d',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'type': 'lag',
+ 'uuid': '5aeebc96-47d7-11ec-90df-005056b3dfc8'
+ },
+ {
+ 'broadcast_domain': {
+ 'ipspace': {'name': 'ip1'},
+ 'name': 'test1'
+ },
+ 'lag': {
+ 'distribution_policy': 'ip',
+ 'member_ports': [
+ {
+ 'name': 'e0c',
+ 'node': {'name': 'mohan9cluster2-01'},
+ },
+ {
+ 'name': 'e0a',
+ 'node': {'name': 'mohan9cluster2-01'},
+ }],
+ 'mode': 'multimode_lacp'
+ },
+ 'name': 'a0d',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'type': 'lag',
+ 'uuid': '5aeebc96-47d7-11ec-90df-005056b3dsd4'
+ }]
+ }, None),
+ 'ifgrp_record_create': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ 'lag': {
+ 'distribution_policy': 'ip',
+ 'mode': 'multimode_lacp'
+ },
+ 'name': 'a0b',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'type': 'lag',
+ 'uuid': '1b830a46-47cd-11ec-90df-005056b3dfc8'
+ }]
+ }, None),
+ 'ifgrp_record_modify': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ 'broadcast_domain': {
+ 'ipspace': {'name': 'ip1'},
+ 'name': 'test1'
+ },
+ 'lag': {
+ 'distribution_policy': 'ip',
+ 'member_ports': [
+ {
+ 'name': 'e0c',
+ 'node': {'name': 'mohan9cluster2-01'},
+ },
+ {
+ 'name': 'e0d',
+ 'node': {'name': 'mohan9cluster2-01'},
+ }],
+ 'mode': 'multimode_lacp'
+ },
+ 'name': 'a0d',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'type': 'lag',
+ 'uuid': '5aeebc96-47d7-11ec-90df-005056b3dsd4'
+ }]
+ }, None)
+}
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ ifgrp_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments:'
+ assert msg in exc.value.args[0]['msg']
+
+
+def test_module_fail_when_broadcast_domain_ipspace(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ ifgrp_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments:'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_broadcast_domain_ipspace_rest_ontap96(mock_request, patch_ansible):
+ '''throw error if broadcast_domain and ipspace are not set'''
+ args = dict(default_args())
+ args['ports'] = "e0c"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ args['node'] = "mohan9cluster2-01"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_6'], # get version
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ ifgrp_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'are mandatory fields with ONTAP 9.6 and 9.7'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_broadcast_domain_ipspace_rest_required_together(mock_request, patch_ansible):
+ '''throw error if one of broadcast_domain or ipspace only set'''
+ args = dict(default_args())
+    '''throw error if only one of broadcast_domain or ipspace is set'''
+ args['distribution_function'] = "ip"
+ args['ipspace'] = "Default"
+ args['mode'] = "multimode_lacp"
+ args['node'] = "mohan9cluster2-01"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_6'], # get version
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ ifgrp_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'parameters are required together: broadcast_domain, ipspace'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_ifgrp_not_found_from_lag_ports(mock_request, patch_ansible):
+ ''' throw error if lag not found with both ports and from_lag_ports '''
+ args = dict(default_args())
+ args['node'] = "mohan9-vsim1"
+ args['ports'] = "e0f"
+ args['from_lag_ports'] = "e0l"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record'] # get for ports
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: cannot find LAG matching from_lag_ports: '['e0l']'."
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_from_lag_ports_1_or_more_ports_not_in_current(mock_request, patch_ansible):
+ ''' throw error if 1 or more from_lag_ports not found in current '''
+ args = dict(default_args())
+ args['node'] = "mohan9-vsim1"
+ args['ports'] = "e0f"
+ args['from_lag_ports'] = "e0d,e0h"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ ]
+ my_obj = ifgrp_module()
+ my_obj.current_records = SRR['ifgrp_record'][1]['records']
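+    # current_records is primed directly, so no GET is registered; e0d belongs to an existing LAG but e0h does not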
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: cannot find LAG matching from_lag_ports: '['e0d', 'e0h']'."
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_from_lag_ports_are_in_different_LAG(mock_request, patch_ansible):
+ ''' throw error if ports in from_lag_ports are in different LAG '''
+ args = dict(default_args())
+ args['node'] = "mohan9-vsim1"
+ args['ports'] = "e0f"
+ args['from_lag_ports'] = "e0d,e0c"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record'] # get
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = "'e0d, e0c' are in different LAG"
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_try_to_delete_only_partial_match_found(mock_request, patch_ansible):
+ ''' delete only with exact match of ports'''
+ args = dict(default_args())
+ args['node'] = "mohan9cluster2-01"
+ args['ports'] = "e0c"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ args['broadcast_domain'] = "test1"
+ args['ipspace'] = "ip1"
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record'], # get
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_try_to_delete_ports_in_different_LAG(mock_request, patch_ansible):
+    ''' if the ports are spread across different LAGs, do not delete and return ok '''
+ args = dict(default_args())
+ args['node'] = "mohan9cluster2-01"
+ args['ports'] = "e0c,e0d"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ args['broadcast_domain'] = "test1"
+ args['ipspace'] = "ip1"
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record'], # get
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_partial_match(mock_request, patch_ansible):
+    '''fail if only a partial match is found for from_lag_ports'''
+ args = dict(default_args())
+ args['node'] = "mohan9cluster2-01"
+ args['from_lag_ports'] = "e0c,e0a,e0v"
+ args['ports'] = "e0n"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ args['state'] = 'present'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record'], # get
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: cannot find LAG matching from_lag_ports: '['e0c', 'e0a', 'e0v']'."
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_partial_match_ports_empty_record_from_lag_ports(mock_request, patch_ansible):
+    ''' fail when from_lag_ports does not match any port of an existing LAG '''
+ args = dict(default_args())
+ args['node'] = "mohan9cluster2-01"
+ args['ports'] = "e0c"
+ args['from_lag_ports'] = "e0k"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record_modify'] # get
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: cannot find LAG matching from_lag_ports: '['e0k']'."
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_create_ifgrp_port(mock_request, patch_ansible):
+ ''' test create ifgrp '''
+ args = dict(default_args())
+ args['node'] = "mohan9-vsim1"
+ args['ports'] = "e0c,e0a"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record_create'], # get
+ SRR['empty_good'], # create
+ SRR['end_of_sequence']
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_create_ifgrp_port_idempotent(mock_request, patch_ansible):
+ ''' test create ifgrp idempotent '''
+ args = dict(default_args())
+ args['node'] = "mohan9cluster2-01"
+ args['ports'] = "e0c,e0a"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_modify_ifgrp_port(mock_request, patch_ansible):
+ ''' remove port e0a from ifgrp a0d with ports e0d,e0c'''
+ args = dict(default_args())
+ args['node'] = "mohan9cluster2-01"
+ args['ports'] = "e0c"
+ args['from_lag_ports'] = "e0c,e0d"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record_modify'], # get
+ SRR['empty_good'], # modify
+ SRR['end_of_sequence']
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_modify_ifgrp_broadcast_domain(mock_request, patch_ansible):
+ ''' modify broadcast domain and ipspace'''
+ args = dict(default_args())
+ args['node'] = "mohan9cluster2-01"
+ args['ports'] = "e0c,e0a"
+ args['from_lag_ports'] = 'e0c'
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ args['broadcast_domain'] = "test1"
+ args['ipspace'] = "Default"
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record'], # get
+ SRR['empty_good'], # modify
+ SRR['end_of_sequence']
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_delete_ifgrp(mock_request, patch_ansible):
+ ''' test delete LAG'''
+ args = dict(default_args())
+ args['node'] = "mohan9cluster2-01"
+ args['ports'] = "e0c,e0a"
+ args['distribution_function'] = "ip"
+ args['mode'] = "multimode_lacp"
+ args['broadcast_domain'] = "test1"
+ args['ipspace'] = "ip1"
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['ifgrp_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = ifgrp_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_port.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_port.py
new file mode 100644
index 000000000..b58e02d1b
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_port.py
@@ -0,0 +1,331 @@
+# (c) 2018-2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_net_port '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings, set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_port \
+ import NetAppOntapNetPort as port_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+    pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.type = kind
+ self.data = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ if self.type == 'raise':
+ raise netapp_utils.zapi.NaApiError(code='1111', message='forcing an error')
+ self.xml_in = xml
+ if self.type == 'port':
+ xml = self.build_port_info(self.data)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_port_info(port_details):
+ ''' build xml data for net-port-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'net-port-info': {
+ # 'port': port_details['port'],
+ 'mtu': str(port_details['mtu']),
+ 'is-administrative-auto-negotiate': 'true',
+ 'is-administrative-up': str(port_details['up_admin']).lower(), # ZAPI uses 'true', 'false'
+ 'ipspace': 'default',
+ 'administrative-flowcontrol': port_details['flowcontrol_admin'],
+ 'node': port_details['node']
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.mock_port = {
+ 'node': 'test',
+ 'ports': 'a1',
+ 'up_admin': True,
+ 'flowcontrol_admin': 'something',
+ 'mtu': 1000
+ }
+
+ def mock_args(self):
+ return {
+ 'node': self.mock_port['node'],
+ 'flowcontrol_admin': self.mock_port['flowcontrol_admin'],
+ 'ports': [self.mock_port['ports']],
+ 'mtu': self.mock_port['mtu'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'feature_flags': {'no_cserver_ems': True},
+ 'use_rest': 'never'
+ }
+
+ def get_port_mock_object(self, kind=None, data=None):
+ """
+ Helper method to return an na_ontap_net_port object
+ :param kind: passes this param to MockONTAPConnection()
+ :return: na_ontap_net_port object
+ """
+ obj = port_module()
+ obj.autosupport_log = Mock(return_value=None)
+ if data is None:
+ data = self.mock_port
+ obj.server = MockONTAPConnection(kind=kind, data=data)
+ return obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ port_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_get_nonexistent_port(self):
+ ''' Test if get_net_port returns None for non-existent port '''
+ set_module_args(self.mock_args())
+ result = self.get_port_mock_object().get_net_port('test')
+ assert result is None
+
+ def test_get_existing_port(self):
+ ''' Test if get_net_port returns details for existing port '''
+ set_module_args(self.mock_args())
+ result = self.get_port_mock_object('port').get_net_port('test')
+ assert result['mtu'] == self.mock_port['mtu']
+ assert result['flowcontrol_admin'] == self.mock_port['flowcontrol_admin']
+ assert result['up_admin'] == self.mock_port['up_admin']
+
+ def test_successful_modify(self):
+ ''' Test modify_net_port '''
+ data = self.mock_args()
+ data['mtu'] = '2000'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object('port').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_successful_modify_int(self):
+ ''' Test modify_net_port '''
+ data = self.mock_args()
+ data['mtu'] = 2000
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object('port').apply()
+ assert exc.value.args[0]['changed']
+ print(exc.value.args[0]['modify'])
+
+ def test_successful_modify_bool(self):
+ ''' Test modify_net_port '''
+ data = self.mock_args()
+ data['up_admin'] = False
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object('port').apply()
+ assert exc.value.args[0]['changed']
+ print(exc.value.args[0]['modify'])
+
+ def test_successful_modify_str(self):
+ ''' Test modify_net_port '''
+ data = self.mock_args()
+ data['flowcontrol_admin'] = 'anything'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object('port').apply()
+ assert exc.value.args[0]['changed']
+ print(exc.value.args[0]['modify'])
+
+ def test_successful_modify_multiple_ports(self):
+ ''' Test modify_net_port '''
+ data = self.mock_args()
+ data['ports'] = ['a1', 'a2']
+ data['mtu'] = '2000'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object('port').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_port.NetAppOntapNetPort.get_net_port')
+ def test_get_called(self, get_port):
+ ''' Test get_net_port '''
+ data = self.mock_args()
+ data['ports'] = ['a1', 'a2']
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object('port').apply()
+ assert get_port.call_count == 2
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_port.NetAppOntapNetPort.get_net_port')
+ def test_negative_not_found_1(self, get_port):
+ ''' Test get_net_port '''
+ data = self.mock_args()
+ data['ports'] = ['a1']
+ set_module_args(data)
+ get_port.return_value = None
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_port_mock_object('port').apply()
+ msg = 'Error: port: a1 not found on node: test - check node name.'
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_port.NetAppOntapNetPort.get_net_port')
+ def test_negative_not_found_2(self, get_port):
+ ''' Test get_net_port '''
+ data = self.mock_args()
+ data['ports'] = ['a1', 'a2']
+ set_module_args(data)
+ get_port.return_value = None
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_port_mock_object('port').apply()
+ msg = 'Error: ports: a1, a2 not found on node: test - check node name.'
+ assert msg in exc.value.args[0]['msg']
+
+ def test_negative_zapi_exception_in_get(self):
+ ''' Test get_net_port '''
+ data = self.mock_args()
+ data['ports'] = ['a1', 'a2']
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_port_mock_object('raise').get_net_port('a1')
+ msg = 'Error getting net ports for test: NetApp API failed. Reason - 1111:forcing an error'
+ assert msg in exc.value.args[0]['msg']
+
+ def test_negative_zapi_exception_in_modify(self):
+ ''' Test error handling in modify_net_port '''
+ data = self.mock_args()
+ data['ports'] = ['a1', 'a2']
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_port_mock_object('raise').modify_net_port('a1', dict())
+ msg = 'Error modifying net ports for test: NetApp API failed. Reason - 1111:forcing an error'
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+ def test_negative_no_netapp_lib(self, mock_has_netapp_lib):
+ ''' Test error when the NetApp-Lib python module is missing '''
+ data = self.mock_args()
+ set_module_args(data)
+ mock_has_netapp_lib.return_value = False
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_port_mock_object('port').apply()
+ msg = 'the python NetApp-Lib module is required'
+ assert msg in exc.value.args[0]['msg']
+
+
+def default_args():
+ return {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_6': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_9_7': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'vlan_record': (200, {
+ "num_records": 1,
+ "records": [{
+ 'broadcast_domain': {
+ 'ipspace': {'name': 'Default'},
+ 'name': 'test1'
+ },
+ 'enabled': False,
+ 'name': 'e0c-15',
+ 'node': {'name': 'mohan9-vsim1'},
+ 'uuid': '97936a14-30de-11ec-ac4d-005056b3d8c8'
+ }]
+ }, None)
+}
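+# Each SRR entry mimics the (status_code, json_body, error_message) triple that a single
+# mocked send_request call is expected to return; e.g. 'generic_error' simulates a 400
+# response with no body and the error text 'Expected error' (interpretation based on how
+# the tuples are consumed below, not on a shared factory).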
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ port_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments:'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_unsupported_rest_properties(mock_request, patch_ansible):
+ '''throw error if unsupported rest properties are set'''
+ args = dict(default_args())
+ args['node'] = "mohan9-vsim1"
+ args['ports'] = "e0d,e0d-15"
+ args['mtu'] = 1500
+ args['duplex_admin'] = 'admin'
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(args)
+ port_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'REST API currently does not support'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_enable_port(mock_request, patch_ansible):
+ ''' test enable port '''
+ args = dict(default_args())
+ args['node'] = "mohan9-vsim1"
+ args['ports'] = "e0c-15"
+ args['up_admin'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['vlan_record'], # get
+ SRR['empty_good'], # patch to enable the port
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_routes.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_routes.py
new file mode 100644
index 000000000..a886e87a3
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_routes.py
@@ -0,0 +1,359 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import create_module,\
+ patch_ansible, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, build_zapi_error, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_routes \
+ import NetAppOntapNetRoutes as net_route_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+DEFAULT_ARGS = {
+ 'https': 'True',
+ 'use_rest': 'never',
+ 'state': 'present',
+ 'destination': '176.0.0.0/24',
+ 'gateway': '10.193.72.1',
+ 'vserver': 'test_vserver',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'metric': 70
+}
+
+
+def route_info_zapi(destination='176.0.0.0/24', gateway='10.193.72.1', metric=70):
+ return {
+ 'attributes': {
+ 'net-vs-routes-info': {
+ 'address-family': 'ipv4',
+ 'destination': destination,
+ 'gateway': gateway,
+ 'metric': metric,
+ 'vserver': 'test_vserver'
+ }
+ }
+ }
+
+
+ZRR = zapi_responses({
+ 'net_route_info': build_zapi_response(route_info_zapi()),
+ 'net_route_info_gateway': build_zapi_response(route_info_zapi(gateway='10.193.0.1', metric=40)),
+ 'net_route_info_destination': build_zapi_response(route_info_zapi(destination='178.0.0.1/24', metric=40)),
+ 'error_15661': build_zapi_error(15661, 'not_exists_error'),
+ 'error_13001': build_zapi_error(13001, 'already exists')
+})
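+# build_zapi_error(code, message) provides a canned ZAPI failure so NaApiError handling can
+# be exercised; keys such as 'no_records', 'empty', 'success' and 'error' used below are
+# presumably shared defaults added by the zapi_responses() factory rather than defined here.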
+
+
+SRR = rest_responses({
+ 'net_routes_record': (200, {
+ 'records': [
+ {
+ "destination": {"address": "176.0.0.0", "netmask": "24", "family": "ipv4"},
+ "gateway": '10.193.72.1',
+ "uuid": '1cd8a442-86d1-11e0-ae1c-123478563412',
+ "metric": 70,
+ "svm": {"name": "test_vserver"}
+ }
+ ]
+ }, None),
+ 'net_routes_cluster': (200, {
+ 'records': [
+ {
+ "destination": {"address": "176.0.0.0", "netmask": "24", "family": "ipv4"},
+ "gateway": '10.193.72.1',
+ "uuid": '1cd8a442-86d1-11e0-ae1c-123478563412',
+ "metric": 70,
+ "scope": "cluster"
+ }
+ ]
+ }, None),
+ 'modified_record': (200, {
+ 'records': [
+ {
+ "destination": {"address": "0.0.0.0", "netmask": "0", "family": "ipv4"},
+ "gateway": '10.193.72.1',
+ "uuid": '1cd8a442-86d1-11e0-ae1c-123478563412',
+ "scope": "cluster",
+ "metric": 90
+ }
+ ]
+ }, None)
+})
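+# rest_responses() is assumed to merge these module-specific records with shared canned
+# responses, so keys like 'is_rest_9_11_0', 'empty_records', 'generic_error' and 'success'
+# referenced below come from the rest_factory defaults rather than this file.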
+
+
+def test_module_fail_when_required_args_missing():
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "destination", "gateway"]
+ error = create_module(net_route_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_get_nonexistent_net_route():
+ ''' Test if get_net_route returns None for non-existent net_route '''
+ register_responses([
+ ('net-routes-get', ZRR['no_records'])
+ ])
+ assert create_module(net_route_module, DEFAULT_ARGS).get_net_route() is None
+
+
+def test_get_nonexistent_net_route_15661():
+ ''' Test if get_net_route returns None for non-existent net_route
+ when ZAPI returns an exception for a route not found
+ '''
+ register_responses([
+ ('net-routes-get', ZRR['error_15661'])
+ ])
+ assert create_module(net_route_module, DEFAULT_ARGS).get_net_route() is None
+
+
+def test_get_existing_route():
+ ''' Test if get_net_route returns details for existing net_route '''
+ register_responses([
+ ('net-routes-get', ZRR['net_route_info'])
+ ])
+ result = create_module(net_route_module, DEFAULT_ARGS).get_net_route()
+ assert result['destination'] == DEFAULT_ARGS['destination']
+ assert result['gateway'] == DEFAULT_ARGS['gateway']
+
+
+def test_create_error_missing_param():
+ ''' Test if create throws an error if destination is not specified'''
+ error = 'missing required arguments: destination'
+ assert error in create_module(net_route_module, {'hostname': 'host', 'gateway': 'gate'}, fail=True)['msg']
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('net-routes-get', ZRR['empty']),
+ ('net-routes-create', ZRR['success']),
+ ('net-routes-get', ZRR['net_route_info']),
+ ])
+ assert create_and_apply(net_route_module, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(net_route_module, DEFAULT_ARGS)['changed']
+
+
+def test_create_zapi_ignore_route_exist():
+ ''' Test that the "already exists" ZAPI error (13001) is ignored on create '''
+ register_responses([
+ ('net-routes-get', ZRR['empty']),
+ ('net-routes-create', ZRR['error_13001'])
+ ])
+ assert create_and_apply(net_route_module, DEFAULT_ARGS)['changed']
+
+
+def test_successful_create_zapi_no_metric():
+ ''' Test successful create '''
+ register_responses([
+ ('net-routes-get', ZRR['empty']),
+ ('net-routes-create', ZRR['success'])
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['metric']
+ assert create_and_apply(net_route_module, DEFAULT_ARGS_COPY)['changed']
+
+
+def test_successful_delete():
+ ''' Test successful delete '''
+ register_responses([
+ ('net-routes-get', ZRR['net_route_info']),
+ ('net-routes-destroy', ZRR['success']),
+ ('net-routes-get', ZRR['empty']),
+ ])
+ assert create_and_apply(net_route_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(net_route_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_successful_modify_metric():
+ ''' Test successful modify metric '''
+ register_responses([
+ ('net-routes-get', ZRR['net_route_info']),
+ ('net-routes-destroy', ZRR['success']),
+ ('net-routes-create', ZRR['success'])
+ ])
+ assert create_and_apply(net_route_module, DEFAULT_ARGS, {'metric': '40'})['changed']
+
+
+def test_successful_modify_gateway():
+ ''' Test successful modify gateway '''
+ register_responses([
+ ('net-routes-get', ZRR['empty']),
+ ('net-routes-get', ZRR['net_route_info']),
+ ('net-routes-destroy', ZRR['success']),
+ ('net-routes-create', ZRR['success']),
+ ('net-routes-get', ZRR['net_route_info_gateway'])
+ ])
+ args = {'from_gateway': '10.193.72.1', 'gateway': '10.193.0.1', 'metric': 40}
+ assert create_and_apply(net_route_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(net_route_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_modify_destination():
+ ''' Test successful modify destination '''
+ register_responses([
+ ('net-routes-get', ZRR['empty']),
+ ('net-routes-get', ZRR['net_route_info']),
+ ('net-routes-destroy', ZRR['success']),
+ ('net-routes-create', ZRR['success']),
+ ('net-routes-get', ZRR['net_route_info_gateway'])
+ ])
+ args = {'from_destination': '176.0.0.0/24', 'destination': '178.0.0.1/24', 'metric': 40}
+ assert create_and_apply(net_route_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(net_route_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_if_all_methods_catch_exception_zapi():
+ ''' test error handling for get/create/modify/delete, first with ZAPI then with REST '''
+ register_responses([
+ # ZAPI get/create/delete error.
+ ('net-routes-get', ZRR['error']),
+ ('net-routes-create', ZRR['error']),
+ ('net-routes-destroy', ZRR['error']),
+ # ZAPI modify error.
+ ('net-routes-get', ZRR['net_route_info']),
+ ('net-routes-destroy', ZRR['success']),
+ ('net-routes-create', ZRR['error']),
+ ('net-routes-create', ZRR['success']),
+ # REST get/create/delete error.
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['generic_error']),
+ ('POST', 'network/ip/routes', SRR['generic_error']),
+ ('DELETE', 'network/ip/routes/12345', SRR['generic_error']),
+ # REST modify error.
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['net_routes_record']),
+ ('DELETE', 'network/ip/routes/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ ('POST', 'network/ip/routes', SRR['generic_error']),
+ ('POST', 'network/ip/routes', SRR['success']),
+ ])
+ net_route_obj = create_module(net_route_module, DEFAULT_ARGS)
+ assert 'Error fetching net route' in expect_and_capture_ansible_exception(net_route_obj.get_net_route, 'fail')['msg']
+ assert 'Error creating net route' in expect_and_capture_ansible_exception(net_route_obj.create_net_route, 'fail')['msg']
+ current = {'destination': '', 'gateway': ''}
+ assert 'Error deleting net route' in expect_and_capture_ansible_exception(net_route_obj.delete_net_route, 'fail', current)['msg']
+ error = 'Error modifying net route'
+ assert error in create_and_apply(net_route_module, DEFAULT_ARGS, {'metric': 80}, fail=True)['msg']
+
+ net_route_obj = create_module(net_route_module, DEFAULT_ARGS, {'use_rest': 'always'})
+ assert 'Error fetching net route' in expect_and_capture_ansible_exception(net_route_obj.get_net_route, 'fail')['msg']
+ assert 'Error creating net route' in expect_and_capture_ansible_exception(net_route_obj.create_net_route, 'fail')['msg']
+ current = {'uuid': '12345'}
+ assert 'Error deleting net route' in expect_and_capture_ansible_exception(net_route_obj.delete_net_route, 'fail', current)['msg']
+ assert error in create_and_apply(net_route_module, DEFAULT_ARGS, {'metric': 80, 'use_rest': 'always'}, fail=True)['msg']
+
+
+def test_rest_successfully_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['empty_records']),
+ ('POST', 'network/ip/routes', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['net_routes_record'])
+ ])
+ assert create_and_apply(net_route_module, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+ assert not create_and_apply(net_route_module, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+
+
+def test_rest_successfully_create_cluster_scope():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['empty_records']),
+ ('POST', 'network/ip/routes', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['net_routes_cluster']),
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['vserver']
+ assert create_and_apply(net_route_module, DEFAULT_ARGS_COPY, {'use_rest': 'always'})['changed']
+ assert not create_and_apply(net_route_module, DEFAULT_ARGS_COPY, {'use_rest': 'always'})['changed']
+
+
+def test_rest_successfully_destroy():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['net_routes_record']),
+ ('DELETE', 'network/ip/routes/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['empty_records']),
+ ])
+ args = {'use_rest': 'always', 'state': 'absent'}
+ assert create_and_apply(net_route_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(net_route_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_rest_successfully_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['empty_records']),
+ ('GET', 'network/ip/routes', SRR['net_routes_record']),
+ ('DELETE', 'network/ip/routes/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['success']),
+ ('POST', 'network/ip/routes', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['modified_record'])
+ ])
+ args = {'use_rest': 'always', 'metric': '90', 'from_destination': '176.0.0.0/24', 'destination': '0.0.0.0/24'}
+ assert create_and_apply(net_route_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(net_route_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_rest_negative_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'network/ip/routes', SRR['empty_records']),
+ ('GET', 'network/ip/routes', SRR['empty_records'])
+ ])
+ error = 'Error modifying: route 176.0.0.0/24 does not exist'
+ args = {'use_rest': 'auto', 'from_destination': '176.0.0.0/24'}
+ assert error in create_and_apply(net_route_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_negative_zapi_no_netapp_lib(mock_has_lib):
+ mock_has_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required.'
+ assert msg in create_module(net_route_module, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_negative_non_supported_option():
+ error = "REST API currently does not support 'from_metric'"
+ args = {'use_rest': 'always', 'from_metric': 23}
+ assert error in create_module(net_route_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_negative_zapi_requires_vserver():
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['vserver']
+ error = "Error: vserver is a required parameter when using ZAPI"
+ assert error in create_module(net_route_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+
+
+def test_negative_dest_format():
+ error = "Error: Expecting '/' in '1.2.3.4'."
+ assert error in create_module(net_route_module, DEFAULT_ARGS, {'destination': '1.2.3.4'}, fail=True)['msg']
+
+
+def test_negative_from_dest_format():
+ args = {'destination': '1.2.3.4', 'from_destination': '5.6.7.8'}
+ error_msg = create_module(net_route_module, DEFAULT_ARGS, args, fail=True)['msg']
+ msg = "Error: Expecting '/' in '1.2.3.4'."
+ assert msg in error_msg
+ msg = "Expecting '/' in '5.6.7.8'."
+ assert msg in error_msg
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_subnet.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_subnet.py
new file mode 100644
index 000000000..ac284c8d7
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_subnet.py
@@ -0,0 +1,275 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_subnet \
+ import NetAppOntapSubnet as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+DEFAULT_ARGS = {
+ 'name': 'test_subnet',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'broadcast_domain': 'Default',
+ 'gateway': '10.0.0.1',
+ 'ipspace': 'Default',
+ 'subnet': '10.0.0.0/24',
+ 'ip_ranges': ['10.0.0.10-10.0.0.20', '10.0.0.30'],
+ 'use_rest': 'never'
+}
+
+
+def subnet_info(name):
+ return {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'net-subnet-info': {
+ 'broadcast-domain': DEFAULT_ARGS['broadcast_domain'],
+ 'gateway': DEFAULT_ARGS['gateway'],
+ 'ip-ranges': [{'ip-range': elem} for elem in DEFAULT_ARGS['ip_ranges']],
+ 'ipspace': DEFAULT_ARGS['ipspace'],
+ 'subnet': DEFAULT_ARGS['subnet'],
+ 'subnet-name': name,
+ }
+ }
+ }
+
+
+ZRR = zapi_responses({
+ 'subnet_info': build_zapi_response(subnet_info(DEFAULT_ARGS['name'])),
+ 'subnet_info_renamed': build_zapi_response(subnet_info('new_test_subnet')),
+})
+
+
+SRR = rest_responses({
+ 'subnet_info': (200, {"records": [{
+ "uuid": "82610703",
+ "name": "test_subnet",
+ "ipspace": {"name": "Default"},
+ "gateway": "10.0.0.1",
+ "broadcast_domain": {"name": "Default"},
+ "subnet": {"address": "10.0.0.0", "netmask": "24", "family": "ipv4"},
+ "available_ip_ranges": [
+ {"start": "10.0.0.10", "end": "10.0.0.20", "family": "ipv4"},
+ {"start": "10.0.0.30", "end": "10.0.0.30", "family": "ipv4"}
+ ]
+ }], "num_records": 1}, None),
+ 'subnet_info_renamed': (200, {"records": [{
+ "uuid": "82610703",
+ "name": "new_test_subnet",
+ "ipspace": {"name": "Default"},
+ "gateway": "10.0.0.1",
+ "broadcast_domain": {"name": "Default"},
+ "subnet": {"address": "10.0.0.0", "netmask": "24", "family": "ipv4"},
+ "available_ip_ranges": [
+ {"start": "10.0.0.10", "end": "10.0.0.20", "family": "ipv4"},
+ {"start": "10.0.0.30", "end": "10.0.0.30", "family": "ipv4"}
+ ]
+ }], "num_records": 1}, None)
+})
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_successful_create():
+ register_responses([
+ ('ZAPI', 'net-subnet-get-iter', ZRR['no_records']),
+ ('ZAPI', 'net-subnet-create', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'net-subnet-get-iter', ZRR['subnet_info']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS)['changed']
+ # idempotency
+ assert not call_main(my_main, DEFAULT_ARGS)['changed']
+
+
+def test_successful_delete():
+ register_responses([
+ ('ZAPI', 'net-subnet-get-iter', ZRR['subnet_info']),
+ ('ZAPI', 'net-subnet-destroy', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'net-subnet-get-iter', ZRR['no_records']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ # idempotency
+ assert not call_main(my_main, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_successful_modify():
+ register_responses([
+ ('ZAPI', 'net-subnet-get-iter', ZRR['subnet_info']),
+ ('ZAPI', 'net-subnet-modify', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'net-subnet-get-iter', ZRR['subnet_info']),
+ ])
+ module_args = {'ip_ranges': ['10.0.0.10-10.0.0.25', '10.0.0.30']}
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # idempotency
+ module_args.pop('ip_ranges')
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_rename():
+ register_responses([
+ ('ZAPI', 'net-subnet-get-iter', ZRR['no_records']),
+ ('ZAPI', 'net-subnet-get-iter', ZRR['subnet_info']),
+ ('ZAPI', 'net-subnet-rename', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'net-subnet-get-iter', ZRR['subnet_info']),
+ ])
+ module_args = {'from_name': DEFAULT_ARGS['name'], 'name': 'new_test_subnet'}
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # idempotency
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_modify_broadcast_domain():
+ register_responses([
+ ('ZAPI', 'net-subnet-get-iter', ZRR['subnet_info']),
+ ])
+ module_args = {'broadcast_domain': 'cannot change'}
+ error = 'Error modifying subnet test_subnet: cannot modify broadcast_domain parameter, desired "cannot change", currrent "Default"'
+ assert error == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rename():
+ register_responses([
+ ('ZAPI', 'net-subnet-get-iter', ZRR['no_records']),
+ ('ZAPI', 'net-subnet-get-iter', ZRR['no_records']),
+ ])
+ module_args = {'from_name': DEFAULT_ARGS['name'], 'name': 'new_test_subnet'}
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == 'Error renaming: subnet test_subnet does not exist'
+
+
+def test_negative_create():
+ register_responses([
+ ('ZAPI', 'net-subnet-get-iter', ZRR['no_records']),
+ # second test
+ ('ZAPI', 'net-subnet-get-iter', ZRR['no_records']),
+ # third test
+ ('ZAPI', 'net-subnet-get-iter', ZRR['no_records']),
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('subnet')
+ assert call_main(my_main, args, fail=True)['msg'] == 'Error - missing required arguments: subnet.'
+ args = dict(DEFAULT_ARGS)
+ args.pop('broadcast_domain')
+ assert call_main(my_main, args, fail=True)['msg'] == 'Error - missing required arguments: broadcast_domain.'
+ args.pop('subnet')
+ assert call_main(my_main, args, fail=True)['msg'] == 'Error - missing required arguments: subnet.'
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ZAPI', 'net-subnet-get-iter', ZRR['error']),
+ ('ZAPI', 'net-subnet-create', ZRR['error']),
+ ('ZAPI', 'net-subnet-destroy', ZRR['error']),
+ ('ZAPI', 'net-subnet-modify', ZRR['error']),
+ ('ZAPI', 'net-subnet-rename', ZRR['error']),
+ # REST exception
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['generic_error']),
+ ('POST', 'network/ip/subnets', SRR['generic_error']),
+ ('PATCH', 'network/ip/subnets/82610703', SRR['generic_error']),
+ ('DELETE', 'network/ip/subnets/82610703', SRR['generic_error']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert zapi_error_message('Error fetching subnet test_subnet') == expect_and_capture_ansible_exception(my_obj.get_subnet, 'fail')['msg']
+ assert zapi_error_message('Error creating subnet test_subnet') == expect_and_capture_ansible_exception(my_obj.create_subnet, 'fail')['msg']
+ assert zapi_error_message('Error deleting subnet test_subnet') == expect_and_capture_ansible_exception(my_obj.delete_subnet, 'fail')['msg']
+ assert zapi_error_message('Error modifying subnet test_subnet') == expect_and_capture_ansible_exception(my_obj.modify_subnet, 'fail', {})['msg']
+ assert zapi_error_message('Error renaming subnet test_subnet') == expect_and_capture_ansible_exception(my_obj.rename_subnet, 'fail')['msg']
+ my_obj = create_module(my_module, DEFAULT_ARGS, {'use_rest': 'always'})
+ my_obj.uuid = '82610703'
+ assert 'Error fetching subnet test_subnet' in expect_and_capture_ansible_exception(my_obj.get_subnet, 'fail')['msg']
+ assert 'Error creating subnet test_subnet' in expect_and_capture_ansible_exception(my_obj.create_subnet, 'fail')['msg']
+ assert 'Error modifying subnet test_subnet' in expect_and_capture_ansible_exception(my_obj.modify_subnet, 'fail', {})['msg']
+ assert 'Error deleting subnet test_subnet' in expect_and_capture_ansible_exception(my_obj.delete_subnet, 'fail')['msg']
+ modify = {'subnet': '192.168.1.2'}
+ assert 'Error: Invalid value specified for subnet' in expect_and_capture_ansible_exception(my_obj.form_create_modify_body_rest, 'fail', modify)['msg']
+
+
+def test_successful_create_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['empty_records']),
+ ('POST', 'network/ip/subnets', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['subnet_info']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+ # idempotency
+ assert not call_main(my_main, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+
+
+def test_successful_modify_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['subnet_info']),
+ ('PATCH', 'network/ip/subnets/82610703', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['subnet_info'])
+ ])
+ module_args = {'ip_ranges': ['10.0.0.10-10.0.0.25', '10.0.0.30'], 'use_rest': 'always'}
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # idempotency
+ module_args.pop('ip_ranges')
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_rename_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['empty_records']),
+ ('GET', 'network/ip/subnets', SRR['subnet_info']),
+ ('PATCH', 'network/ip/subnets/82610703', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['subnet_info_renamed']),
+ ])
+ module_args = {'from_name': DEFAULT_ARGS['name'], 'name': 'new_test_subnet', 'use_rest': 'always'}
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # idempotency
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_delete_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['subnet_info']),
+ ('DELETE', 'network/ip/subnets/82610703', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'network/ip/subnets', SRR['empty_records']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, {'state': 'absent', 'use_rest': 'always'})['changed']
+ # idempotency
+ assert not call_main(my_main, DEFAULT_ARGS, {'state': 'absent', 'use_rest': 'always'})['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_vlan.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_vlan.py
new file mode 100644
index 000000000..bbcb9e7fe
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_net_vlan.py
@@ -0,0 +1,252 @@
+# (c) 2018-2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP net vlan Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import sys
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings, set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_net_vlan \
+ import NetAppOntapVlan as my_module # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def default_args():
+ args = {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_6': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_9_7': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'vlan_record': (200, {
+ "num_records": 1,
+ "records": [{
+ 'broadcast_domain': {
+ 'ipspace': {'name': 'Default'},
+ 'name': 'test1'
+ },
+ 'enabled': True,
+ 'name': 'e0c-15',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': '97936a14-30de-11ec-ac4d-005056b3d8c8'
+ }]
+ }, None),
+ 'vlan_record_create': (200, {
+ "num_records": 1,
+ "records": [{
+ 'broadcast_domain': {
+ 'ipspace': {'name': 'Default'},
+ 'name': 'test2'
+ },
+ 'enabled': True,
+ 'name': 'e0c-16',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': '97936a14-30de-11ec-ac4d-005056b3d8c8'
+ }]
+ }, None),
+ 'vlan_record_modify': (200, {
+ "num_records": 1,
+ "records": [{
+ 'broadcast_domain': {
+ 'ipspace': {'name': 'Default'},
+ 'name': 'test1'
+ },
+ 'enabled': False,
+ 'name': 'e0c-16',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': '97936a14-30de-11ec-ac4d-005056b3d8c8'
+ }]
+ }, None)
+}
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments:'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_when_required_args_missing_ONTAP96(mock_request, patch_ansible):
+ ''' required arguments are reported as errors for ONTAP 9.6'''
+ args = dict(default_args())
+ args['node'] = 'mohan9cluster2-01'
+ args['vlanid'] = 154
+ args['parent_interface'] = 'e0c'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_6'] # get version
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'broadcast_domain and ipspace are required fields with ONTAP 9.6 and 9.7'
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_fail_when_required_args_missing_ONTAP97(mock_request, patch_ansible):
+ ''' required arguments are reported as errors for ONTAP 9.7'''
+ args = dict(default_args())
+ args['node'] = 'mohan9cluster2-01'
+ args['vlanid'] = 154
+ args['parent_interface'] = 'e0c'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_7'] # get version
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'broadcast_domain and ipspace are required fields with ONTAP 9.6 and 9.7'
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_get_vlan_called(mock_request, patch_ansible):
+ ''' test get'''
+ args = dict(default_args())
+ args['node'] = 'mohan9cluster2-01'
+ args['vlanid'] = 15
+ args['parent_interface'] = 'e0c'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['vlan_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_create_vlan_called(mock_request, patch_ansible):
+ ''' test create'''
+ args = dict(default_args())
+ args['node'] = 'mohan9cluster2-01'
+ args['vlanid'] = 16
+ args['parent_interface'] = 'e0c'
+ args['broadcast_domain'] = 'test2'
+ args['ipspace'] = 'Default'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record'], # get
+ SRR['empty_good'], # create
+ SRR['vlan_record_create'], # get created vlan record to check PATCH call required
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_modify_vlan_called(mock_request, patch_ansible):
+ ''' test modify'''
+ args = dict(default_args())
+ args['node'] = 'mohan9cluster2-01'
+ args['vlanid'] = 16
+ args['parent_interface'] = 'e0c'
+ args['broadcast_domain'] = 'test1'
+ args['ipspace'] = 'Default'
+ args['enabled'] = 'no'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['vlan_record_create'], # get
+ SRR['empty_good'], # patch call
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_vlan_called(mock_request, patch_ansible):
+ ''' test delete'''
+ args = dict(default_args())
+ args['node'] = 'mohan9cluster2-01'
+ args['vlanid'] = 15
+ args['parent_interface'] = 'e0c'
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['vlan_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_vlan_idempotent(mock_request, patch_ansible):
+ ''' test delete idempotent'''
+ args = dict(default_args())
+ args['node'] = 'mohan9cluster2-01'
+ args['vlanid'] = 15
+ args['parent_interface'] = 'e0c'
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nfs.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nfs.py
new file mode 100644
index 000000000..116f25f06
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nfs.py
@@ -0,0 +1,338 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import copy
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_no_warnings, assert_no_warnings_except_zapi, assert_warning_was_raised, call_main, create_and_apply, print_warnings, set_module_args,\
+ AnsibleExitJson, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_nfs \
+ import NetAppONTAPNFS as nfs_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+nfs_info = {
+ "attributes-list": {
+ "nfs-info": {
+ "auth-sys-extended-groups": "false",
+ "cached-cred-harvest-timeout": "86400000",
+ "cached-cred-negative-ttl": "7200000",
+ "cached-cred-positive-ttl": "86400000",
+ "cached-transient-err-ttl": "30000",
+ "chown-mode": "use_export_policy",
+ "enable-ejukebox": "true",
+ "extended-groups-limit": "32",
+ "file-session-io-grouping-count": "5000",
+ "file-session-io-grouping-duration": "120",
+ "ignore-nt-acl-for-root": "false",
+ "is-checksum-enabled-for-replay-cache": "true",
+ "is-mount-rootonly-enabled": "true",
+ "is-netgroup-dns-domain-search": "true",
+ "is-nfs-access-enabled": "false",
+ "is-nfs-rootonly-enabled": "false",
+ "is-nfsv2-enabled": "false",
+ "is-nfsv3-64bit-identifiers-enabled": "false",
+ "is-nfsv3-connection-drop-enabled": "true",
+ "is-nfsv3-enabled": "true",
+ "is-nfsv3-fsid-change-enabled": "true",
+ "is-nfsv4-fsid-change-enabled": "true",
+ "is-nfsv4-numeric-ids-enabled": "true",
+ "is-nfsv40-acl-enabled": "false",
+ "is-nfsv40-enabled": "true",
+ "is-nfsv40-migration-enabled": "false",
+ "is-nfsv40-read-delegation-enabled": "false",
+ "is-nfsv40-referrals-enabled": "false",
+ "is-nfsv40-req-open-confirm-enabled": "false",
+ "is-nfsv40-write-delegation-enabled": "false",
+ "is-nfsv41-acl-enabled": "false",
+ "is-nfsv41-acl-preserve-enabled": "true",
+ "is-nfsv41-enabled": "true",
+ "is-nfsv41-migration-enabled": "false",
+ "is-nfsv41-pnfs-enabled": "true",
+ "is-nfsv41-read-delegation-enabled": "false",
+ "is-nfsv41-referrals-enabled": "false",
+ "is-nfsv41-state-protection-enabled": "true",
+ "is-nfsv41-write-delegation-enabled": "false",
+ "is-qtree-export-enabled": "false",
+ "is-rquota-enabled": "false",
+ "is-tcp-enabled": "false",
+ "is-udp-enabled": "false",
+ "is-v3-ms-dos-client-enabled": "false",
+ "is-validate-qtree-export-enabled": "true",
+ "is-vstorage-enabled": "false",
+ "map-unknown-uid-to-default-windows-user": "true",
+ "mountd-port": "635",
+ "name-service-lookup-protocol": "udp",
+ "netgroup-trust-any-ns-switch-no-match": "false",
+ "nfsv4-acl-max-aces": "400",
+ "nfsv4-grace-seconds": "45",
+ "nfsv4-id-domain": "defaultv4iddomain.com",
+ "nfsv4-lease-seconds": "30",
+ "nfsv41-implementation-id-domain": "netapp.com",
+ "nfsv41-implementation-id-name": "NetApp Release Kalyaniblack__9.4.0",
+ "nfsv41-implementation-id-time": "1541070767",
+ "nfsv4x-session-num-slots": "180",
+ "nfsv4x-session-slot-reply-cache-size": "640",
+ "nlm-port": "4045",
+ "nsm-port": "4046",
+ "ntacl-display-permissive-perms": "false",
+ "ntfs-unix-security-ops": "use_export_policy",
+ "permitted-enc-types": {
+ "string": ["des", "des3", "aes_128", "aes_256"]
+ },
+ "rpcsec-ctx-high": "0",
+ "rpcsec-ctx-idle": "0",
+ "rquotad-port": "4049",
+ "showmount": "true",
+ "showmount-timestamp": "1548372452",
+ "skip-root-owner-write-perm-check": "false",
+ "tcp-max-xfer-size": "1048576",
+ "udp-max-xfer-size": "32768",
+ "v3-search-unconverted-filename": "false",
+ "v4-inherited-acl-preserve": "false",
+ "vserver": "ansible"
+ }
+ },
+ "num-records": "1"
+}
+
+nfs_info_no_tcp_max_xfer_size = copy.deepcopy(nfs_info)
+del nfs_info_no_tcp_max_xfer_size['attributes-list']['nfs-info']['tcp-max-xfer-size']
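+# Dropping tcp-max-xfer-size simulates ONTAP 9.3 and earlier, where ZAPI does not report
+# this field (exercised in test_modify_tcp_max_xfer_size below).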
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None, job_error=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'nfs':
+ xml = self.build_nfs_info(self.params)
+ self.xml_out = xml
+ if self.kind == 'nfs_status':
+ xml = self.build_nfs_status_info(self.params)
+ return xml
+
+ @staticmethod
+ def build_nfs_info(nfs_details):
+ ''' build xml data for nfs-info attributes '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ xml.translate_struct(nfs_info)
+ return xml
+
+ @staticmethod
+ def build_nfs_status_info(nfs_status_details):
+ ''' build xml data for nfs status (is-enabled) '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'is-enabled': "true"
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+DEFAULT_ARGS = {
+ 'vserver': 'nfs_vserver',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'https': 'false',
+ 'use_rest': 'never'
+}
+
+
+SRR = zapi_responses({
+ 'nfs_info': build_zapi_response(nfs_info),
+ 'nfs_info_no_tcp_max_xfer_size': build_zapi_response(nfs_info_no_tcp_max_xfer_size)
+})
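+# Note: although named SRR (used elsewhere for REST responses), these are ZAPI-style canned
+# responses built with build_zapi_response()/zapi_responses().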
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.mock_nfs_group = {
+ 'vserver': DEFAULT_ARGS['vserver'],
+ }
+
+ def mock_args(self):
+ return dict(DEFAULT_ARGS)
+
+ def get_nfs_mock_object(self, kind=None):
+ """
+ Helper method to return an na_ontap_nfs module object
+ :param kind: passes this param to MockONTAPConnection()
+ :return: na_ontap_nfs module object
+ """
+ nfsy_obj = nfs_module()
+ nfsy_obj.asup_log_for_cserver = Mock(return_value=None)
+ nfsy_obj.cluster = Mock()
+ nfsy_obj.cluster.invoke_successfully = Mock()
+ if kind is None:
+ nfsy_obj.server = MockONTAPConnection()
+ else:
+ nfsy_obj.server = MockONTAPConnection(kind=kind, data=self.mock_nfs_group)
+ return nfsy_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ error = 'missing required arguments'
+ assert error in call_main(my_main, {}, fail=True)['msg']
+
+ def test_get_nonexistent_nfs(self):
+ ''' Test if get_nfs_service returns None for non-existent nfs '''
+ set_module_args(self.mock_args())
+ result = self.get_nfs_mock_object().get_nfs_service()
+ assert result is None
+
+ def test_get_existing_nfs(self):
+ ''' Test if get_nfs_service returns details for existing nfs '''
+ set_module_args(self.mock_args())
+ result = self.get_nfs_mock_object('nfs').get_nfs_service()
+ assert result['nfsv3']
+
+ def test_get_nonexistent_nfs_status(self):
+ ''' Test if get_nfs_status returns None for non-existent nfs '''
+ set_module_args(self.mock_args())
+ result = self.get_nfs_mock_object().get_nfs_status()
+ assert result is None
+
+ def test_get_existing_nfs_status(self):
+ ''' Test if get_nfs_status returns details for nfs '''
+ set_module_args(self.mock_args())
+ result = self.get_nfs_mock_object('nfs_status').get_nfs_status()
+ assert result
+
+ def test_modify_nfs(self):
+ ''' Test if modify_nfs runs for existing nfs '''
+ data = self.mock_args()
+ current = {
+ 'nfsv3': 'enabled',
+ 'nfsv3_fsid_change': 'enabled',
+ 'nfsv4': 'enabled',
+ 'nfsv41': 'enabled',
+ 'vstorage_state': 'enabled',
+ 'tcp': 'enabled',
+ 'udp': 'enabled',
+ 'nfsv4_id_domain': 'nfsv4_id_domain',
+ 'nfsv40_acl': 'enabled',
+ 'nfsv40_read_delegation': 'enabled',
+ 'nfsv40_write_delegation': 'enabled',
+ 'nfsv41_acl': 'enabled',
+ 'nfsv41_read_delegation': 'enabled',
+ 'nfsv41_write_delegation': 'enabled',
+ 'showmount': 'enabled',
+ 'tcp_max_xfer_size': '1048576',
+ }
+
+ data.update(current)
+ set_module_args(data)
+ self.get_nfs_mock_object('nfs_status').modify_nfs_service(current)
+
+ def test_successfully_modify_nfs(self):
+ ''' Test modify nfs successful for modifying tcp max xfer size. '''
+ data = self.mock_args()
+ data['tcp_max_xfer_size'] = 8192
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_nfs_mock_object('nfs').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_modify_nfs_idempotency(self):
+ ''' Test modify nfs idempotency '''
+ data = self.mock_args()
+ data['tcp_max_xfer_size'] = '1048576'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_nfs_mock_object('nfs').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_nfs.NetAppONTAPNFS.delete_nfs_service')
+ def test_successfully_delete_nfs(self, delete_nfs_service):
+ ''' Test successfully delete nfs '''
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ obj = self.get_nfs_mock_object('nfs')
+ with pytest.raises(AnsibleExitJson) as exc:
+ obj.apply()
+ assert exc.value.args[0]['changed']
+ delete_nfs_service.assert_called_with()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_nfs.NetAppONTAPNFS.get_nfs_service')
+ def test_successfully_enable_nfs(self, get_nfs_service):
+ ''' Test successfully enable nfs on non-existent nfs '''
+ data = self.mock_args()
+ data['state'] = 'present'
+ set_module_args(data)
+ get_nfs_service.side_effect = [
+ None,
+ {}
+ ]
+ obj = self.get_nfs_mock_object('nfs')
+ with pytest.raises(AnsibleExitJson) as exc:
+ obj.apply()
+ assert exc.value.args[0]['changed']
+
+
+def test_modify_tcp_max_xfer_size():
+ ''' test tcp_max_xfer_size modify and idempotency, and the error on ONTAP 9.3 or earlier where the value is not reported '''
+ register_responses([
+ # ONTAP 9.4 and later, tcp_max_xfer_size is an INT
+ ('ZAPI', 'nfs-service-get-iter', SRR['nfs_info']),
+ ('ZAPI', 'nfs-status', SRR['success']),
+ ('ZAPI', 'nfs-service-modify', SRR['success']),
+ # ONTAP 9.4 and later, tcp_max_xfer_size is an INT, idempotency
+ ('ZAPI', 'nfs-service-get-iter', SRR['nfs_info']),
+ # ONTAP 9.3 and earlier, tcp_max_xfer_size is not set
+ ('ZAPI', 'nfs-service-get-iter', SRR['nfs_info_no_tcp_max_xfer_size']),
+ ])
+ module_args = {
+ 'tcp_max_xfer_size': 4500
+ }
+ assert create_and_apply(nfs_module, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ 'tcp_max_xfer_size': 1048576
+ }
+ assert not create_and_apply(nfs_module, DEFAULT_ARGS, module_args)['changed']
+ error = 'Error: tcp_max_xfer_size is not supported on ONTAP 9.3 or earlier.'
+ assert create_and_apply(nfs_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+ assert_no_warnings_except_zapi()
+
+
+def test_warning_on_nfsv41_alias():
+ ''' a warning is raised when the nfsv4.1 alias is used instead of nfsv41 '''
+ register_responses([
+ # ONTAP 9.4 and later, tcp_max_xfer_size is an INT
+ ('ZAPI', 'nfs-service-get-iter', SRR['nfs_info']),
+ ('ZAPI', 'nfs-status', SRR['success']),
+ ('ZAPI', 'nfs-service-modify', SRR['success']),
+ ])
+ module_args = {
+ 'nfsv4.1': 'disabled'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised('Error: "nfsv4.1" option conflicts with Ansible naming conventions - please use "nfsv41".')
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nfs_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nfs_rest.py
new file mode 100644
index 000000000..995dbeb6f
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nfs_rest.py
@@ -0,0 +1,324 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ AnsibleFailJson, AnsibleExitJson, patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_nfs \
+ import NetAppONTAPNFS as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+# REST API canned responses when mocking send_request.
+# The rest_factory provides default responses shared across testcases.
+SRR = rest_responses({
+ # module specific responses
+ 'one_record': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "transport": {
+ "udp_enabled": True,
+ "tcp_enabled": True
+ },
+ "protocol": {
+ "v3_enabled": True,
+ "v4_id_domain": "carchi8py.com",
+ "v40_enabled": False,
+ "v41_enabled": False,
+ "v40_features": {
+ "acl_enabled": False,
+ "read_delegation_enabled": False,
+ "write_delegation_enabled": False
+ },
+ "v41_features": {
+ "acl_enabled": False,
+ "read_delegation_enabled": False,
+ "write_delegation_enabled": False,
+ "pnfs_enabled": False
+ }
+ },
+ "vstorage_enabled": False,
+ "showmount_enabled": True,
+ "root": {
+ "ignore_nt_acl": False,
+ "skip_write_permission_check": False
+ },
+ "security": {
+ "chown_mode": "restricted",
+ "nt_acl_display_permission": False,
+ "ntfs_unix_security": "fail",
+ "permitted_encryption_types": ["des3"],
+ "rpcsec_context_idle": 5
+ },
+ "windows":{
+ "v3_ms_dos_client_enabled": False,
+ "map_unknown_uid_to_default_user": True,
+ "default_user": "test_user"
+ },
+ "tcp_max_xfer_size": "16384"
+ }
+ ]}, None),
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'ansibleSVM',
+ 'use_rest': 'always',
+}
+
+
+def set_default_args():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'ansibleSVM',
+ 'use_rest': 'always',
+ })
+
+
+def test_get_nfs_rest_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/nfs/services', SRR['empty_records'])
+ ])
+ set_module_args(set_default_args())
+ my_obj = my_module()
+ assert my_obj.get_nfs_service_rest() is None
+
+
+def test_partially_supported_rest():
+ register_responses([('GET', 'cluster', SRR['is_rest_96'])])
+ module_args = set_default_args()
+ module_args['showmount'] = 'enabled'
+ set_module_args(module_args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = "Error: Minimum version of ONTAP for showmount is (9, 8)."
+ assert msg in exc.value.args[0]['msg']
+
+
+def test_get_nfs_rest_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/nfs/services', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error getting nfs services for SVM ansibleSVM: calling: protocols/nfs/services: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_nfs_service_rest, 'fail')['msg']
+
+
+def test_get_nfs_rest_one_record():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/nfs/services', SRR['one_record'])
+ ])
+ set_module_args(set_default_args())
+ my_obj = my_module()
+ assert my_obj.get_nfs_service_rest() is not None
+
+
+def test_create_nfs():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/nfs/services', SRR['empty_records']),
+ ('POST', 'protocols/nfs/services', SRR['empty_good'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {})['changed']
+
+
+def test_create_nfs_all_options():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'protocols/nfs/services', SRR['empty_good'])
+ ])
+ set_module_args(set_default_args())
+ my_obj = my_module()
+ my_obj.parameters['nfsv3'] = True
+ my_obj.parameters['nfsv4'] = False
+ my_obj.parameters['nfsv41'] = False
+ my_obj.parameters['nfsv41_pnfs'] = False
+ my_obj.parameters['vstorage_state'] = False
+ my_obj.parameters['nfsv4_id_domain'] = 'carchi8py.com'
+ my_obj.parameters['tcp'] = True
+ my_obj.parameters['udp'] = True
+ my_obj.parameters['nfsv40_acl'] = False
+ my_obj.parameters['nfsv40_read_delegation'] = False
+ my_obj.parameters['nfsv40_write_delegation'] = False
+ my_obj.parameters['nfsv41_acl'] = False
+ my_obj.parameters['nfsv41_read_delegation'] = False
+ my_obj.parameters['nfsv41_write_delegation'] = False
+ my_obj.parameters['showmount'] = True
+ my_obj.parameters['service_state'] = 'stopped'
+ my_obj.create_nfs_service_rest()
+ assert get_mock_record().is_record_in_json({'svm.name': 'ansibleSVM'}, 'POST', 'protocols/nfs/services')
+
+
+def test_create_nfs_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'protocols/nfs/services', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error creating nfs service for SVM ansibleSVM: calling: protocols/nfs/services: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.create_nfs_service_rest, 'fail')['msg']
+
+
+def test_delete_nfs():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/nfs/services', SRR['one_record']),
+ ('DELETE', 'protocols/nfs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good'])
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_nfs_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('DELETE', 'protocols/nfs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error'])
+ ])
+ set_module_args(set_default_args())
+ my_obj = my_module()
+ my_obj.parameters['state'] = 'absent'
+ my_obj.svm_uuid = '671aa46e-11ad-11ec-a267-005056b30cfa'
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_nfs_service_rest()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = "Error deleting nfs service for SVM ansibleSVM"
+ assert msg == exc.value.args[0]['msg']
+
+
+def test_delete_nfs_no_uuid_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ my_module_object = create_module(my_module, DEFAULT_ARGS, module_args)
+ msg = "Error deleting nfs service for SVM ansibleSVM: svm.uuid is None"
+ assert msg in expect_and_capture_ansible_exception(my_module_object.delete_nfs_service_rest, 'fail')['msg']
+
+
+def test_modify_nfs():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/nfs/services', SRR['one_record']),
+ ('PATCH', 'protocols/nfs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good'])
+ ])
+ set_module_args(set_default_args())
+ my_obj = my_module()
+ my_obj.parameters['nfsv3'] = 'disabled'
+ my_obj.svm_uuid = '671aa46e-11ad-11ec-a267-005056b30cfa'
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+
+
+def test_modify_nfs_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'protocols/nfs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error'])
+ ])
+ set_module_args(set_default_args())
+ my_obj = my_module()
+ my_obj.parameters['nfsv3'] = 'disabled'
+ my_obj.svm_uuid = '671aa46e-11ad-11ec-a267-005056b30cfa'
+ modify = {'nfsv3': False}
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_nfs_service_rest(modify)
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = "Error modifying nfs service for SVM ansibleSVM: calling: protocols/nfs/services/671aa46e-11ad-11ec-a267-005056b30cfa: got Expected error."
+ assert msg == exc.value.args[0]['msg']
+
+
+def test_modify_nfs_no_uuid_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ set_module_args(set_default_args())
+ my_obj = my_module()
+ my_obj.parameters['nfsv3'] = 'disabled'
+ modify = {'nfsv3': False}
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_nfs_service_rest(modify)
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = "Error modifying nfs service for SVM ansibleSVM: svm.uuid is None"
+ assert msg == exc.value.args[0]['msg']
+
+
+def test_modify_nfs_root():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/nfs/services', SRR['one_record']),
+ ('PATCH', 'protocols/nfs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['success'])
+ ])
+ module_args = {
+ "root":
+ {
+ "ignore_nt_acl": True,
+ "skip_write_permission_check": True
+ }
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_nfs_security():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/nfs/services', SRR['one_record']),
+ ('PATCH', 'protocols/nfs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['success'])
+ ])
+ module_args = {
+ "security":
+ {
+ "chown_mode": "restricted",
+ "nt_acl_display_permission": "true",
+ "ntfs_unix_security": "fail",
+ "permitted_encryption_types": ["des3"],
+ "rpcsec_context_idle": 5
+ }
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_nfs_windows():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_0']),
+ ('GET', 'protocols/nfs/services', SRR['one_record']),
+ ('PATCH', 'protocols/nfs/services/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['success'])
+ ])
+ module_args = {
+ "windows":
+ {
+ "v3_ms_dos_client_enabled": True,
+ "map_unknown_uid_to_default_user": False,
+ "default_user": "test_user"
+ },
+ "tcp_max_xfer_size": "16384"
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_node.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_node.py
new file mode 100644
index 000000000..d29c5c64f
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_node.py
@@ -0,0 +1,222 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_node '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_node \
+ import NetAppOntapNode as node_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
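+# each SRR entry is a (status_code, json_body, error_message) tuple, matching what the mocked send_request returns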
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'node_record': (200, {"num_records": 1, "records": [
+ {
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7",
+ "name": 'node1',
+ "location": 'myloc'}
+ ]}, None)
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.type = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'node':
+ xml = self.build_node_info()
+ elif self.type == 'node_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_node_info():
+ ''' build xml data for node-details-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'attributes': {
+ 'node-details-info': {
+ "node": "node1",
+ "node-location": "myloc",
+ "node-asset-tag": "mytag"
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def set_default_args(self, use_rest=None):
+ hostname = '10.10.10.10'
+ username = 'username'
+ password = 'password'
+ name = 'node1'
+
+ args = dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'name': name,
+ 'location': 'myloc'
+ })
+
+ if use_rest is not None:
+ args['use_rest'] = use_rest
+
+ return args
+
+ @staticmethod
+ def get_node_mock_object(cx_type='zapi', kind=None):
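+ # only the ZAPI flavor needs a mock server; the REST tests patch send_request instead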
+ node_obj = node_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ node_obj.server = MockONTAPConnection()
+ else:
+ node_obj.server = MockONTAPConnection(kind=kind)
+ return node_obj
+
+ def test_ensure_get_called(self):
+ ''' test get_node for non-existent entry'''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ print('starting')
+ my_obj = node_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.cluster = MockONTAPConnection('node')
+ assert my_obj.get_node is not None
+
+ def test_successful_rename(self):
+ ''' renaming node and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['from_name'] = 'node1'
+ data['name'] = 'node2'
+ set_module_args(data)
+ my_obj = node_module()
+ my_obj.cluster = MockONTAPConnection('node')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # recreate the module object so na_helper forgets the previous 'changed' value
+ data['name'] = 'node1'
+ set_module_args(data)
+ my_obj = node_module()
+ my_obj.cluster = MockONTAPConnection('node')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successful_modify(self):
+ ''' modifying node and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['location'] = 'myloc1'
+ set_module_args(data)
+ my_obj = node_module()
+ my_obj.cluster = MockONTAPConnection('node')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # recreate the module object so na_helper forgets the previous 'changed' value
+ data['location'] = 'myloc'
+ set_module_args(data)
+ my_obj = node_module()
+ my_obj.cluster = MockONTAPConnection('node')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ data = self.set_default_args(use_rest='Never')
+ data['from_name'] = 'node1'
+ data['name'] = 'node2'
+ set_module_args(data)
+ my_obj = node_module()
+ my_obj.cluster = MockONTAPConnection('node_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.rename_node()
+ assert 'Error renaming node: ' in exc.value.args[0]['msg']
+ data = self.set_default_args(use_rest='Never')
+ data['location'] = 'myloc1'
+ set_module_args(data)
+ my_obj1 = node_module()
+ my_obj1.cluster = MockONTAPConnection('node_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj1.modify_node()
+ assert 'Error modifying node: ' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_modify_rest(self, mock_request):
+ data = self.set_default_args()
+ data['from_name'] = 'node2'
+ data['location'] = 'mylocnew'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['node_record'], # get
+ SRR['empty_good'], # no response for modify
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_node_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_rename_rest(self, mock_request):
+ data = self.set_default_args()
+ data['from_name'] = 'node'
+ data['name'] = 'node2'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['node_record'], # get
+ SRR['empty_good'], # no response for modify
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_node_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_modify_location_rest(self, mock_request):
+ data = self.set_default_args()
+ data['location'] = 'mylocnew'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['node_record'], # get
+ SRR['empty_good'], # no response for modify
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_node_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntfs_dacl.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntfs_dacl.py
new file mode 100644
index 000000000..da8e15ffc
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntfs_dacl.py
@@ -0,0 +1,232 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_ntfs_dacl'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntfs_dacl \
+ import NetAppOntapNtfsDacl as dacl_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+HAS_NETAPP_ZAPI_MSG = "pip install netapp_lib is required"
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ request = xml.to_string().decode('utf-8')
+ if self.kind == 'error':
+ raise netapp_utils.zapi.NaApiError('test', 'expect error')
+ elif request.startswith("<ems-autosupport-log>"):
+ xml = None # or anything that makes the logger happy, so @patch is no longer needed
+ # or
+ # xml = build_ems_log_response()
+ elif request.startswith("<file-directory-security-ntfs-dacl-get-iter>"):
+ if self.kind == 'create':
+ xml = self.build_dacl_info()
+ else:
+ xml = self.build_dacl_info(self.params)
+ elif request.startswith("<file-directory-security-ntfs-dacl-modify>"):
+ xml = self.build_dacl_info(self.params)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_dacl_info(data=None):
+ xml = netapp_utils.zapi.NaElement('xml')
+ vserver = 'vserver'
+ attributes = {'num-records': '0',
+ 'attributes-list': {'file-directory-security-ntfs-dacl': {'vserver': vserver}}}
+
+ if data is not None:
+ attributes['num-records'] = '1'
+ if data.get('access_type'):
+ attributes['attributes-list']['file-directory-security-ntfs-dacl']['access-type'] = data['access_type']
+ if data.get('account'):
+ attributes['attributes-list']['file-directory-security-ntfs-dacl']['account'] = data['account']
+ if data.get('rights'):
+ attributes['attributes-list']['file-directory-security-ntfs-dacl']['rights'] = data['rights']
+ if data.get('advanced_rights'):
+ attributes['attributes-list']['file-directory-security-ntfs-dacl']['advanced-rights'] = data['advanced_rights']
+ if data.get('apply_to'):
+ tmp = []
+ for target in data['apply_to']:
+ tmp.append({'inheritance-level': target})
+ attributes['attributes-list']['file-directory-security-ntfs-dacl']['apply-to'] = tmp
+ if data.get('security_descriptor'):
+ attributes['attributes-list']['file-directory-security-ntfs-dacl']['ntfs-sd'] = data['security_descriptor']
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_ntfs_dacl '''
+
+ def mock_args(self):
+ return {
+ 'vserver': 'vserver',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_dacl_mock_object(self, type='zapi', kind=None, status=None):
+ dacl_obj = dacl_module()
+ dacl_obj.autosupport_log = Mock(return_value=None)
+ if type == 'zapi':
+ if kind is None:
+ dacl_obj.server = MockONTAPConnection()
+ else:
+ dacl_obj.server = MockONTAPConnection(kind=kind, data=status)
+ return dacl_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ dacl_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_get_dacl_error(self):
+ data = self.mock_args()
+ data['access_type'] = 'allow'
+ data['account'] = 'acc_test'
+ data['rights'] = 'full_control'
+ data['security_descriptor'] = 'sd_test'
+ data['apply_to'] = 'this_folder,files'
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_dacl_mock_object('zapi', 'error', data).apply()
+ msg = 'Error fetching allow DACL for account acc_test for security descriptor sd_test: NetApp API failed. Reason - test:expect error'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_successfully_create_dacl(self):
+ data = self.mock_args()
+ data['access_type'] = 'allow'
+ data['account'] = 'acc_test'
+ data['rights'] = 'full_control'
+ data['security_descriptor'] = 'sd_test'
+ data['apply_to'] = 'this_folder,files'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_dacl_mock_object('zapi', 'create', data).apply()
+ assert exc.value.args[0]['changed']
+
+ def test_create_dacl_idempotency(self):
+ data = self.mock_args()
+ data['access_type'] = 'allow'
+ data['account'] = 'acc_test'
+ data['rights'] = 'full_control'
+ data['security_descriptor'] = 'sd_test'
+ data['apply_to'] = ['this_folder', 'files']
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_dacl_mock_object('zapi', 'create_idempotency', data).apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successfully_modify_dacl(self):
+ data = self.mock_args()
+ data['access_type'] = 'allow'
+ data['account'] = 'acc_test'
+ data['rights'] = 'full_control'
+ data['security_descriptor'] = 'sd_test'
+ data['apply_to'] = ['this_folder', 'files']
+ set_module_args(data)
+ data['advanced_rights'] = 'read_data,write_data'
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_dacl_mock_object('zapi', 'create', data).apply()
+ assert exc.value.args[0]['changed']
+
+ def test_modify_dacl_idempotency(self):
+ data = self.mock_args()
+ data['access_type'] = 'allow'
+ data['account'] = 'acc_test'
+ data['rights'] = 'full_control'
+ data['security_descriptor'] = 'sd_test'
+ data['apply_to'] = ['this_folder', 'files']
+ set_module_args(data)
+ data['rights'] = 'full_control'
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_dacl_mock_object('zapi', 'modify_idempotency', data).apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntfs_dacl.NetAppOntapNtfsDacl.get_dacl')
+ def test_modify_error(self, get_info):
+ data = self.mock_args()
+ data['access_type'] = 'allow'
+ data['account'] = 'acc_test'
+ data['rights'] = 'full_control'
+ data['security_descriptor'] = 'sd_test'
+ set_module_args(data)
+ get_info.side_effect = [
+ {
+ 'access_type': 'allow',
+ 'account': 'acc_test',
+ 'security_descriptor': 'sd_test',
+ 'rights': 'modify',
+ 'apply_to': ['this_folder', 'files']
+ }
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_dacl_mock_object('zapi', 'error', data).apply()
+ msg = 'Error modifying allow DACL for account acc_test for security descriptor sd_test: NetApp API failed. Reason - test:expect error'
+ assert exc.value.args[0]['msg'] == msg
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntfs_dacl.NetAppOntapNtfsDacl.get_dacl')
+ def test_create_error(self, get_info):
+ data = self.mock_args()
+ data['access_type'] = 'allow'
+ data['account'] = 'acc_test'
+ data['rights'] = 'full_control'
+ data['security_descriptor'] = 'sd_test'
+ set_module_args(data)
+ get_info.side_effect = [
+ None
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_dacl_mock_object('zapi', 'error', data).apply()
+ msg = 'Error adding allow DACL for account acc_test for security descriptor sd_test: NetApp API failed. Reason - test:expect error'
+ assert exc.value.args[0]['msg'] == msg
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntfs_dacl.NetAppOntapNtfsDacl.get_dacl')
+ def test_delete_error(self, get_info):
+ data = self.mock_args()
+ data['access_type'] = 'allow'
+ data['account'] = 'acc_test'
+ data['rights'] = 'full_control'
+ data['security_descriptor'] = 'sd_test'
+ data['state'] = 'absent'
+ set_module_args(data)
+ get_info.side_effect = [
+ {
+ 'access_type': 'allow',
+ 'account': 'acc_test',
+ 'security_descriptor': 'sd_test',
+ 'rights': 'modify'
+ }
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_dacl_mock_object('zapi', 'error', data).apply()
+ msg = 'Error deleting allow DACL for account acc_test for security descriptor sd_test: NetApp API failed. Reason - test:expect error'
+ assert exc.value.args[0]['msg'] == msg
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntfs_sd.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntfs_sd.py
new file mode 100644
index 000000000..6f1f78b34
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntfs_sd.py
@@ -0,0 +1,189 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_ntfs_sd'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntfs_sd \
+ import NetAppOntapNtfsSd as sd_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ request = xml.to_string().decode('utf-8')
+ if request.startswith("<ems-autosupport-log>"):
+ xml = None # or anything that makes the logger happy, so @patch is no longer needed
+ # or
+ # xml = build_ems_log_response()
+ elif self.kind == 'error':
+ raise netapp_utils.zapi.NaApiError('test', 'expect error')
+ elif request.startswith("<file-directory-security-ntfs-get-iter>"):
+ if self.kind == 'create':
+ xml = self.build_sd_info()
+ else:
+ xml = self.build_sd_info(self.params)
+ elif request.startswith("<file-directory-security-ntfs-modify>"):
+ xml = self.build_sd_info(self.params)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_sd_info(data=None):
+ xml = netapp_utils.zapi.NaElement('xml')
+ vserver = 'vserver'
+ attributes = {'num-records': 1,
+ 'attributes-list': {'file-directory-security-ntfs': {'vserver': vserver}}}
+ if data is not None:
+ if data.get('name'):
+ attributes['attributes-list']['file-directory-security-ntfs']['ntfs-sd'] = data['name']
+ if data.get('owner'):
+ attributes['attributes-list']['file-directory-security-ntfs']['owner'] = data['owner']
+ if data.get('group'):
+ attributes['attributes-list']['file-directory-security-ntfs']['group'] = data['group']
+ if data.get('control_flags_raw'):
+ attributes['attributes-list']['file-directory-security-ntfs']['control-flags-raw'] = str(data['control_flags_raw'])
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_ntfs_sd '''
+
+ def mock_args(self):
+ return {
+ 'vserver': 'vserver',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_sd_mock_object(self, type='zapi', kind=None, status=None):
+ sd_obj = sd_module()
+ # netapp_utils.ems_log_event = Mock(return_value=None)
+ if type == 'zapi':
+ if kind is None:
+ sd_obj.server = MockONTAPConnection()
+ else:
+ sd_obj.server = MockONTAPConnection(kind=kind, data=status)
+ return sd_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ sd_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_successfully_create_sd(self):
+ data = self.mock_args()
+ data['name'] = 'sd_test'
+ data['owner'] = 'user_test'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_sd_mock_object('zapi', 'create', data).apply()
+ assert exc.value.args[0]['changed']
+
+ def test_create_sd_idempotency(self):
+ data = self.mock_args()
+ data['name'] = 'sd_test'
+ data['owner'] = 'user_test'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_sd_mock_object('zapi', 'create_idempotency', data).apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successfully_modify_sd(self):
+ data = self.mock_args()
+ data['name'] = 'sd_test'
+ data['owner'] = 'user_test'
+ data['control_flags_raw'] = 1
+ set_module_args(data)
+ data['control_flags_raw'] = 2
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_sd_mock_object('zapi', 'create', data).apply()
+ assert exc.value.args[0]['changed']
+
+ def test_modify_sd_idempotency(self):
+ data = self.mock_args()
+ data['name'] = 'sd_test'
+ data['owner'] = 'user_test'
+ data['control_flags_raw'] = 2
+ set_module_args(data)
+ data['control_flags_raw'] = 2
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_sd_mock_object('zapi', 'modify_idempotency', data).apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntfs_sd.NetAppOntapNtfsSd.get_ntfs_sd')
+ def test_modify_error(self, get_info):
+ data = self.mock_args()
+ data['name'] = 'sd_test'
+ data['owner'] = 'user_test'
+ data['control_flags_raw'] = 2
+ set_module_args(data)
+ get_info.side_effect = [
+ {
+ 'name': 'sd_test',
+ 'control_flags_raw': 1
+ }
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_sd_mock_object('zapi', 'error', data).apply()
+ print(exc)
+ assert exc.value.args[0]['msg'] == 'Error modifying NTFS security descriptor sd_test: NetApp API failed. Reason - test:expect error'
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntfs_sd.NetAppOntapNtfsSd.get_ntfs_sd')
+ def test_create_error(self, get_info):
+ data = self.mock_args()
+ data['name'] = 'sd_test'
+ data['owner'] = 'user_test'
+ set_module_args(data)
+ get_info.side_effect = [
+ None
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_sd_mock_object('zapi', 'error', data).apply()
+ print(exc)
+ assert exc.value.args[0]['msg'] == 'Error creating NTFS security descriptor sd_test: NetApp API failed. Reason - test:expect error'
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntfs_sd.NetAppOntapNtfsSd.get_ntfs_sd')
+ def test_delete_error(self, get_info):
+ data = self.mock_args()
+ data['name'] = 'sd_test'
+ data['owner'] = 'user_test'
+ data['state'] = 'absent'
+ set_module_args(data)
+ get_info.side_effect = [
+ {
+ 'name': 'sd_test',
+ 'owner': 'user_test'
+ }
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_sd_mock_object('zapi', 'error', data).apply()
+ print(exc)
+ assert exc.value.args[0]['msg'] == 'Error deleting NTFS security descriptor sd_test: NetApp API failed. Reason - test:expect error'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntp.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntp.py
new file mode 100644
index 000000000..0632cff98
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntp.py
@@ -0,0 +1,143 @@
+# (c) 2018-2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP ntp Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings, set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntp \
+ import NetAppOntapNTPServer as my_module, main as uut_main # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def default_args():
+ return {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_6': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'server_record': (200, {
+ "records": [{
+ "server": "0.0.0.0",
+ "version": "auto",
+ }],
+ 'num_records': 1
+ }, None),
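+ # 'create_server' mimics an asynchronous POST that returns a job link; the 'job' entry below is the completed job record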
+ 'create_server': (200, {
+ 'job': {
+ 'uuid': 'fde79888-692a-11ea-80c2-005056b39fe7',
+ '_links': {
+ 'self': {
+ 'href': '/api/cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7'}}}
+ }, None),
+ 'job': (200, {
+ "uuid": "fde79888-692a-11ea-80c2-005056b39fe7",
+ "state": "success",
+ "start_time": "2020-02-26T10:35:44-08:00",
+ "end_time": "2020-02-26T10:47:38-08:00",
+ "_links": {
+ "self": {
+ "href": "/api/cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7"
+ }
+ }
+ }, None)
+}
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments: server_name'
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_get_server_called(mock_request, patch_ansible):
+ args = dict(default_args())
+ args['server_name'] = '0.0.0.0'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['server_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_create_server_called(mock_request, patch_ansible):
+ args = dict(default_args())
+ args['server_name'] = '0.0.0.0'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record'], # get
+ SRR['create_server'], # create
+ SRR['job'], # Job
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_server_called(mock_request, patch_ansible):
+ args = dict(default_args())
+ args['server_name'] = '0.0.0.0'
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['server_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntp_key.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntp_key.py
new file mode 100644
index 000000000..9e4ed661e
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ntp_key.py
@@ -0,0 +1,141 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ntp_key \
+ import NetAppOntapNTPKey as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
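+# rest_responses() merges these custom entries with the framework's default canned responses (is_rest_9_10_1, empty_records, generic_error, ...)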
+SRR = rest_responses({
+ 'ntp_key': (200, {
+ "records": [
+ {
+ "id": 1,
+ "digest_type": "sha1",
+ "value": "addf120b430021c36c232c99ef8d926aea2acd6b"
+ }],
+ "num_records": 1
+ }, None),
+ 'svm_uuid': (200, {"records": [
+ {
+ 'uuid': 'e3cb5c7f-cd20'
+ }], "num_records": 1}, None)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+}
+
+
+def test_get_ntp_key_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster/ntp/keys', SRR['empty_records'])
+ ])
+ module_args = {'id': 1, 'digest_type': 'sha1', 'value': 'test'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_ntp_key() is None
+
+
+def test_get_ntp_key_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster/ntp/keys', SRR['generic_error'])
+ ])
+ module_args = {'id': 1, 'digest_type': 'sha1', 'value': 'test'}
+ my_module_object = create_module(my_module, DEFAULT_ARGS, module_args)
+ msg = 'Error fetching key with id 1: calling: cluster/ntp/keys: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_ntp_key, 'fail')['msg']
+
+
+def test_create_ntp_key():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster/ntp/keys', SRR['empty_records']),
+ ('POST', 'cluster/ntp/keys', SRR['empty_good'])
+ ])
+ module_args = {'id': 1, 'digest_type': 'sha1', 'value': 'test'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_ntp_key_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('POST', 'cluster/ntp/keys', SRR['generic_error'])
+ ])
+ module_args = {'id': 1, 'digest_type': 'sha1', 'value': 'test'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.create_ntp_key, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating key with id 1: calling: cluster/ntp/keys: got Expected error.' == error
+
+
+def test_delete_ntp_key():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster/ntp/keys', SRR['ntp_key']),
+ ('DELETE', 'cluster/ntp/keys/1', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent', 'id': 1, 'digest_type': 'sha1', 'value': 'test'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_ntp_key_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('DELETE', 'cluster/ntp/keys/1', SRR['generic_error'])
+ ])
+ module_args = {'id': 1, 'digest_type': 'sha1', 'value': 'test', 'state': 'absent'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.delete_ntp_key, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting key with id 1: calling: cluster/ntp/keys/1: got Expected error.' == error
+
+
+def test_modify_ntp_key():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster/ntp/keys', SRR['ntp_key']),
+ ('PATCH', 'cluster/ntp/keys/1', SRR['empty_good'])
+ ])
+ module_args = {'id': 1, 'digest_type': 'sha1', 'value': 'test2'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_ntp_key_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/ntp/keys', SRR['ntp_key']),
+ ('PATCH', 'cluster/ntp/keys/1', SRR['generic_error'])
+ ])
+ module_args = {'id': 1, 'digest_type': 'sha1', 'value': 'test2'}
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying key with id 1: calling: cluster/ntp/keys/1: got Expected error.' == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme.py
new file mode 100644
index 000000000..b24c0e289
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme.py
@@ -0,0 +1,185 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_nvme'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme \
+ import NetAppONTAPNVMe as my_module
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'nvme':
+ xml = self.build_nvme_info()
+ elif self.type == 'nvme_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_nvme_info():
+ ''' build xml data for nvme-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 1,
+ 'attributes-list': [{'nvme-target-service-info': {'is-available': 'true'}}]}
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
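+ # onbox=True would point the tests at a real system; these unit tests keep it False and use the mock connection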
+
+ def set_default_args(self):
+ if self.onbox:
+ hostname = '10.193.75.3'
+ username = 'admin'
+ password = 'netapp1!'
+ vserver = 'ansible'
+ status_admin = True
+ else:
+ hostname = 'hostname'
+ username = 'username'
+ password = 'password'
+ vserver = 'vserver'
+ status_admin = True
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'vserver': vserver,
+ 'status_admin': status_admin,
+ 'use_rest': 'never',
+ })
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called(self):
+ ''' test get_nvme() for non-existent nvme'''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = self.server
+ assert my_obj.get_nvme() is None
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_nvme() for existing nvme'''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection(kind='nvme')
+ assert my_obj.get_nvme()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme.NetAppONTAPNVMe.create_nvme')
+ def test_successful_create(self, create_nvme):
+ ''' creating nvme and testing idempotency '''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ create_nvme.assert_called_with()
+ # recreate the module object so na_helper forgets the previous 'changed' value
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('nvme')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme.NetAppONTAPNVMe.delete_nvme')
+ def test_successful_delete(self, delete_nvme):
+ ''' deleting nvme and testing idempotency '''
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('nvme')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ delete_nvme.assert_called_with()
+ # recreate the module object so na_helper forgets the previous 'changed' value
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme.NetAppONTAPNVMe.modify_nvme')
+ def test_successful_modify(self, modify_nvme):
+ ''' modifying nvme and testing idempotency '''
+ data = self.set_default_args()
+ data['status_admin'] = False
+ set_module_args(data)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('nvme')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ modify_nvme.assert_called_with()
+ # recreate the module object so na_helper forgets the previous 'changed' value
+ data = self.set_default_args()
+ set_module_args(data)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('nvme')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('nvme_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.get_nvme()
+ assert 'Error fetching nvme info:' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_nvme()
+ assert 'Error creating nvme' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_nvme()
+ assert 'Error deleting nvme' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_nvme()
+ assert 'Error modifying nvme' in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_namespace.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_namespace.py
new file mode 100644
index 000000000..b70b9fd10
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_namespace.py
@@ -0,0 +1,168 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_nvme_namespace'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme_namespace \
+ import NetAppONTAPNVMENamespace as my_module
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'namespace':
+ xml = self.build_namespace_info()
+ elif self.type == 'namespace_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_namespace_info():
+ ''' build xml data for namespace-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 2,
+ 'attributes-list': [{'nvme-namespace-info': {'path': 'abcd/vol'}},
+ {'nvme-namespace-info': {'path': 'xyz/vol'}}]}
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
+
+ def set_default_args(self):
+ if self.onbox:
+ hostname = '10.193.75.3'
+ username = 'admin'
+ password = 'netapp1!'
+ vserver = 'ansible'
+ ostype = 'linux'
+ path = 'abcd/vol'
+ size = 20
+ size_unit = 'mb'
+ else:
+ hostname = 'hostname'
+ username = 'username'
+ password = 'password'
+ vserver = 'vserver'
+ ostype = 'linux'
+ path = 'abcd/vol'
+ size = 20
+ size_unit = 'mb'
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'ostype': ostype,
+ 'vserver': vserver,
+ 'path': path,
+ 'size': size,
+ 'size_unit': size_unit,
+ 'use_rest': 'never',
+ })
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called(self):
+ ''' test get_namespace() for non-existent namespace'''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = self.server
+ assert my_obj.get_namespace() is None
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_namespace() for existing namespace'''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection(kind='namespace')
+ assert my_obj.get_namespace()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme_namespace.NetAppONTAPNVMENamespace.create_namespace')
+ def test_successful_create(self, create_namespace):
+ ''' creating namespace and testing idempotency '''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ create_namespace.assert_called_with()
+ # recreate the module object so na_helper forgets the previous 'changed' value
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('namespace')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme_namespace.NetAppONTAPNVMENamespace.delete_namespace')
+ def test_successful_delete(self, delete_namespace):
+ ''' deleting namespace and testing idempotency '''
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('namespace')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ delete_namespace.assert_called_with()
+ # recreate the module object so na_helper forgets the previous 'changed' value
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('namespace_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.get_namespace()
+ assert 'Error fetching namespace info:' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_namespace()
+ assert 'Error creating namespace for path' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_namespace()
+ assert 'Error deleting namespace for path' in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_namespace_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_namespace_rest.py
new file mode 100644
index 000000000..648caaf87
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_namespace_rest.py
@@ -0,0 +1,121 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_aggregate when using REST """
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme_namespace \
+ import NetAppONTAPNVMENamespace as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'nvme_namespace': (200, {
+ "name": "/vol/test/disk1",
+ "uuid": "81068ae6-4674-4d78-a8b7-dadb23f67edf",
+ "svm": {
+ "name": "ansibleSVM"
+ },
+ "enabled": True
+ }, None)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always',
+ 'vserver': 'ansibleSVM',
+ 'ostype': 'linux',
+ 'path': '/vol/test/disk1',
+ 'size': 10,
+ 'size_unit': 'mb',
+ 'block_size': 4096
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "path", "vserver"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_get_namespace_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'storage/namespaces', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_namespace_rest() is None
+
+
+def test_get_namespace_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'storage/namespaces', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching namespace info for vserver: ansibleSVM'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_namespace_rest, 'fail')['msg']
+
+
+def test_create_namespace():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'storage/namespaces', SRR['empty_records']),
+ ('POST', 'storage/namespaces', SRR['empty_good'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_create_namespace_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('POST', 'storage/namespaces', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.create_namespace_rest, 'fail')['msg']
+ msg = 'Error creating namespace for vserver ansibleSVM: calling: storage/namespaces: got Expected error.'
+ assert msg == error
+
+
+def test_delete_namespace():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'storage/namespaces', SRR['nvme_namespace']),
+ ('DELETE', 'storage/namespaces/81068ae6-4674-4d78-a8b7-dadb23f67edf', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_namespace_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('DELETE', 'storage/namespaces/81068ae6-4674-4d78-a8b7-dadb23f67edf', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.namespace_uuid = '81068ae6-4674-4d78-a8b7-dadb23f67edf'
+ error = expect_and_capture_ansible_exception(my_obj.delete_namespace_rest, 'fail')['msg']
+ msg = 'Error deleting namespace for vserver ansibleSVM: calling: storage/namespaces/81068ae6-4674-4d78-a8b7-dadb23f67edf: got Expected error.'
+ assert msg == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_rest.py
new file mode 100644
index 000000000..db23a8e72
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_rest.py
@@ -0,0 +1,131 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_aggregate when using REST """
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, \
+ create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme \
+ import NetAppONTAPNVMe as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'nvme_service': (200, {
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "enabled": True
+ }, None)
+})
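+# Illustrative note: each SRR entry is a canned REST reply in the (HTTP status, JSON body, error)
+# form used by the rest_factory helper; keys such as 'is_rest_96', 'empty_records', 'empty_good'
+# and 'generic_error' appear to be supplied by rest_responses() alongside 'nvme_service' above.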
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always',
+ 'vserver': 'svm1'
+}
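+# Each test registers the (HTTP method, endpoint, canned reply) tuples it expects; the patched
+# request layer from mock_rest_and_zapi_requests serves them in order, so an unexpected or
+# missing call should surface as a test failure.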
+
+
+def test_get_nvme_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/services', SRR['empty_records'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert my_obj.get_nvme_rest() is None
+
+
+def test_get_nvme_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/services', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching nvme info for vserver: svm1'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_nvme_rest, 'fail')['msg']
+
+
+def test_create_nvme():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/services', SRR['empty_records']),
+ ('POST', 'protocols/nvme/services', SRR['empty_good'])
+ ])
+ module_args = {'status_admin': True}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_nvme_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('POST', 'protocols/nvme/services', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['status_admin'] = True
+ error = expect_and_capture_ansible_exception(my_obj.create_nvme_rest, 'fail')['msg']
+ msg = 'Error creating nvme for vserver svm1: calling: protocols/nvme/services: got Expected error.'
+ assert msg == error
+
+
+def test_delete_nvme():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/services', SRR['nvme_service']),
+ ('PATCH', 'protocols/nvme/services/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['empty_good']),
+ ('DELETE', 'protocols/nvme/services/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_nvme_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('DELETE', 'protocols/nvme/services/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.svm_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ error = expect_and_capture_ansible_exception(my_obj.delete_nvme_rest, 'fail')['msg']
+ msg = 'Error deleting nvme for vserver svm1: calling: protocols/nvme/services/02c9e252-41be-11e9-81d5-00a0986138f7: got Expected error.'
+ assert msg == error
+
+
+def test_modify_nvme():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/services', SRR['nvme_service']),
+ ('PATCH', 'protocols/nvme/services/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['empty_good'])
+ ])
+ module_args = {'status_admin': False}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_nvme_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('PATCH', 'protocols/nvme/services/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['status_admin'] = False
+ my_obj.svm_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ error = expect_and_capture_ansible_exception(my_obj.modify_nvme_rest, 'fail', {'status': False})['msg']
+ msg = 'Error modifying nvme for vserver: svm1'
+ assert msg == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_subsystem.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_subsystem.py
new file mode 100644
index 000000000..0e6acdbec
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_subsystem.py
@@ -0,0 +1,225 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_nvme_subsystem '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ expect_and_capture_ansible_exception, call_main, create_module, patch_ansible
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme_subsystem import NetAppONTAPNVMESubsystem as my_module, main as my_main
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+subsystem_info = {
+ 'attributes-list': [{'nvme-target-subsystem-map-info': {'path': 'abcd/vol'}},
+ {'nvme-target-subsystem-map-info': {'path': 'xyz/vol'}}]}
+
+subsystem_info_one_path = {
+ 'attributes-list': [{'nvme-target-subsystem-map-info': {'path': 'abcd/vol'}}]}
+
+subsystem_info_one_host = {
+ 'attributes-list': [{'nvme-target-subsystem-map-info': {'host-nqn': 'host-nqn'}}]}
+
+ZRR = zapi_responses({
+ 'subsystem_info': build_zapi_response(subsystem_info, 2),
+ 'subsystem_info_one_path': build_zapi_response(subsystem_info_one_path, 1),
+ 'subsystem_info_one_host': build_zapi_response(subsystem_info_one_host, 1),
+})
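+# build_zapi_response() wraps each attributes dict into a ZAPI response element together with the
+# given record count; zapi_responses() is assumed to also provide the generic 'success',
+# 'no_records' and 'error' replies referenced below through ZRR.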
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'never',
+ 'subsystem': 'subsystem',
+ 'vserver': 'vserver',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('subsystem')
+ error = 'missing required arguments: subsystem'
+ assert error in call_main(my_main, args, fail=True)['msg']
+
+
+def test_ensure_get_called():
+ ''' test get_subsystem() for non-existent subsystem'''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_subsystem() is None
+
+
+def test_ensure_get_called_existing():
+ ''' test get_subsystem() for existing subsystem'''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['subsystem_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_subsystem()
+
+
+def test_successful_create():
+ ''' creating subsystem and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['no_records']),
+ ('ZAPI', 'nvme-subsystem-create', ZRR['success']),
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['subsystem_info']),
+ # idempotency
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['subsystem_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'ostype': 'windows'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_delete():
+ ''' deleting subsystem and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['subsystem_info']),
+ ('ZAPI', 'nvme-subsystem-delete', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'state': 'absent',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_get_host_map_called():
+ ''' test get_subsystem_host_map() for non-existent subsystem'''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-map-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_subsystem_host_map('paths') is None
+
+
+def test_ensure_get_host_map_called_existing():
+ ''' test get_subsystem_host_map() for existing subsystem'''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-map-get-iter', ZRR['subsystem_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_subsystem_host_map('paths')
+
+
+def test_successful_add():
+ ''' adding subsystem host/map and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['subsystem_info']),
+ ('ZAPI', 'nvme-subsystem-host-get-iter', ZRR['no_records']),
+ ('ZAPI', 'nvme-subsystem-map-get-iter', ZRR['no_records']),
+ ('ZAPI', 'nvme-subsystem-host-add', ZRR['success']),
+ ('ZAPI', 'nvme-subsystem-map-add', ZRR['success']),
+ ('ZAPI', 'nvme-subsystem-map-add', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['subsystem_info']),
+ ('ZAPI', 'nvme-subsystem-host-get-iter', ZRR['subsystem_info_one_host']),
+ ('ZAPI', 'nvme-subsystem-map-get-iter', ZRR['subsystem_info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'ostype': 'windows',
+ 'paths': ['abcd/vol', 'xyz/vol'],
+ 'hosts': 'host-nqn'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_remove():
+ ''' removing subsystem host/map and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['subsystem_info']),
+ ('ZAPI', 'nvme-subsystem-map-get-iter', ZRR['subsystem_info']),
+ ('ZAPI', 'nvme-subsystem-map-remove', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['subsystem_info_one_path']),
+ ('ZAPI', 'nvme-subsystem-map-get-iter', ZRR['subsystem_info_one_path']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'paths': ['abcd/vol'],
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ module_args = {
+ "use_rest": "never"
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_error_handling():
+ ''' test error handling on ZAPI calls '''
+ register_responses([
+ ('ZAPI', 'nvme-subsystem-get-iter', ZRR['error']),
+ ('ZAPI', 'nvme-subsystem-create', ZRR['error']),
+ ('ZAPI', 'nvme-subsystem-delete', ZRR['error']),
+ ('ZAPI', 'nvme-subsystem-map-get-iter', ZRR['error']),
+ ('ZAPI', 'nvme-subsystem-host-add', ZRR['error']),
+ ('ZAPI', 'nvme-subsystem-map-add', ZRR['error']),
+ ('ZAPI', 'nvme-subsystem-host-remove', ZRR['error']),
+ ('ZAPI', 'nvme-subsystem-map-remove', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'ostype': 'windows'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = zapi_error_message('Error fetching subsystem info')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_subsystem, 'fail')['msg']
+ error = zapi_error_message('Error creating subsystem for subsystem')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_subsystem, 'fail')['msg']
+ error = zapi_error_message('Error deleting subsystem for subsystem')
+ assert error in expect_and_capture_ansible_exception(my_obj.delete_subsystem, 'fail')['msg']
+ error = zapi_error_message('Error fetching subsystem path info')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_subsystem_host_map, 'fail', 'paths')['msg']
+ error = zapi_error_message('Error adding hostname for subsystem subsystem')
+ assert error in expect_and_capture_ansible_exception(my_obj.add_subsystem_host_map, 'fail', ['hostname'], 'hosts')['msg']
+ error = zapi_error_message('Error adding pathname for subsystem subsystem')
+ assert error in expect_and_capture_ansible_exception(my_obj.add_subsystem_host_map, 'fail', ['pathname'], 'paths')['msg']
+ error = zapi_error_message('Error removing hostname for subsystem subsystem')
+ assert error in expect_and_capture_ansible_exception(my_obj.remove_subsystem_host_map, 'fail', ['hostname'], 'hosts')['msg']
+ error = zapi_error_message('Error removing pathname for subsystem subsystem')
+ assert error in expect_and_capture_ansible_exception(my_obj.remove_subsystem_host_map, 'fail', ['pathname'], 'paths')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_subsystem_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_subsystem_rest.py
new file mode 100644
index 000000000..693816662
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_nvme_subsystem_rest.py
@@ -0,0 +1,256 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_nvme_subsystem when using REST """
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ patch_ansible, call_main, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_nvme_subsystem\
+ import NetAppONTAPNVMESubsystem as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'nvme_subsystem': (200, {
+ "hosts": [{
+ "nqn": "nqn.1992-08.com.netapp:sn.f2207584d03611eca164005056b3bd39:subsystem.test3"
+ }],
+ "name": "subsystem1",
+ "uuid": "81068ae6-4674-4d78-a8b7-dadb23f67edf",
+ "comment": "string",
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "os_type": "hyper_v",
+ "subsystem_maps": [{
+ "namespace": {
+ "name": "/vol/test3/disk1",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ },
+ }],
+ "enabled": True,
+ }, None),
+ # 'nvme_host': (200, [{
+ # "nqn": "nqn.1992-08.com.netapp:sn.f2207584d03611eca164005056b3bd39:subsystem.test3"
+ # }], None),
+ 'nvme_map': (200, {
+ "records": [{
+ "namespace": {
+ "name": "/vol/test3/disk1",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412",
+ },
+ }], "num_records": 1,
+ }, None),
+
+ 'nvme_host': (200, {
+ "records": [
+ {
+ "nqn": "nqn.1992-08.com.netapp:sn.f2207584d03611eca164005056b3bd39:subsystem.test3",
+ "subsystem": {
+ "uuid": "81068ae6-4674-4d78-a8b7-dadb23f67edf"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None),
+
+ 'error_svm_not_found': (400, None, 'SVM "ansibleSVM" does not exist.')
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always',
+ 'vserver': 'ansibleSVM',
+ 'ostype': 'linux',
+ 'subsystem': 'subsystem1',
+}
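+# As used throughout these tests: create_module() builds the module object from DEFAULT_ARGS plus
+# optional per-test module_args, create_and_apply() and call_main() also run apply()/main() and
+# return the exit JSON, and expect_and_capture_ansible_exception() returns the captured payload.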
+
+
+def test_get_subsystem_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['empty_records'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ assert my_module_object.get_subsystem_rest() is None
+
+
+def test_get_subsystem_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['generic_error']),
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching subsystem info for vserver: ansibleSVM'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_subsystem_rest, 'fail')['msg']
+
+
+def test_create_subsystem():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['empty_records']),
+ ('POST', 'protocols/nvme/subsystems', SRR['empty_good']),
+ ('GET', 'protocols/nvme/subsystems', SRR['nvme_subsystem']),
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_create_subsystem_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('POST', 'protocols/nvme/subsystems', SRR['generic_error']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['zero_records']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ error = 'Error creating subsystem for vserver ansibleSVM: calling: protocols/nvme/subsystems: got Expected error.'
+ assert error in expect_and_capture_ansible_exception(my_obj.create_subsystem_rest, 'fail')['msg']
+ args = dict(DEFAULT_ARGS)
+ del args['ostype']
+ error = "Error: Missing required parameter 'ostype' for creating subsystem"
+ assert error in call_main(my_main, args, fail=True)['msg']
+
+
+def test_delete_subsystem():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['nvme_subsystem']),
+ ('DELETE', 'protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_subsystem_no_vserver():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['error_svm_not_found']),
+ ])
+ module_args = {'state': 'absent'}
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_subsystem_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('DELETE', 'protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.subsystem_uuid = '81068ae6-4674-4d78-a8b7-dadb23f67edf'
+ error = expect_and_capture_ansible_exception(my_obj.delete_subsystem_rest, 'fail')['msg']
+ msg = 'Error deleting subsystem for vserver ansibleSVM: calling: protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf: got Expected error.'
+ assert msg == error
+
+
+def test_add_subsystem_host():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['empty_records']),
+ ('POST', 'protocols/nvme/subsystems', SRR['empty_good']),
+ ('GET', 'protocols/nvme/subsystems', SRR['nvme_subsystem']),
+ ('POST', 'protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf/hosts', SRR['empty_good'])
+ ])
+ module_args = {'hosts': ['nqn.1992-08.com.netapp:sn.f2207584d03611eca164005056b3bd39:subsystem.test3']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_add_only_subsystem_host():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['nvme_subsystem']),
+ ('GET', 'protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf/hosts', SRR['empty_records']),
+ ('POST', 'protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf/hosts', SRR['empty_good'])
+ ])
+ module_args = {'hosts': ['nqn.1992-08.com.netapp:sn.f2207584d03611eca164005056b3bd39:subsystem.test3']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_add_subsystem_map():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['empty_records']),
+ ('POST', 'protocols/nvme/subsystems', SRR['empty_good']),
+ ('GET', 'protocols/nvme/subsystems', SRR['nvme_subsystem']),
+ ('POST', 'protocols/nvme/subsystem-maps', SRR['empty_good'])
+ ])
+ module_args = {'paths': ['/vol/test3/disk1']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_add_only_subsystem_map():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['nvme_subsystem']),
+ ('GET', 'protocols/nvme/subsystem-maps', SRR['empty_records']),
+ ('POST', 'protocols/nvme/subsystem-maps', SRR['empty_good'])
+ ])
+ module_args = {'paths': ['/vol/test3/disk1']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_remove_only_subsystem_host():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['nvme_subsystem']),
+ ('GET', 'protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf/hosts', SRR['nvme_host']),
+ ('POST', 'protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf/hosts', SRR['empty_good']),
+ ('DELETE', 'protocols/nvme/subsystems/81068ae6-4674-4d78-a8b7-dadb23f67edf/'
+ 'hosts/nqn.1992-08.com.netapp:sn.f2207584d03611eca164005056b3bd39:subsystem.test3', SRR['empty_good'])
+ ])
+ module_args = {'hosts': ['nqn.1992-08.com.netapp:sn.f2207584d03611eca164005056b3bd39:subsystem.test']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_remove_only_subsystem_map():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems', SRR['nvme_subsystem']),
+ ('GET', 'protocols/nvme/subsystem-maps', SRR['nvme_map']),
+ ('POST', 'protocols/nvme/subsystem-maps', SRR['empty_good']),
+ ('DELETE', 'protocols/nvme/subsystem-maps/81068ae6-4674-4d78-a8b7-dadb23f67edf/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_good'])
+ ])
+ module_args = {'paths': ['/vol/test2/disk1']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_errors():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'protocols/nvme/subsystems/None/hosts', SRR['generic_error']),
+ ('GET', 'protocols/nvme/subsystem-maps', SRR['generic_error']),
+ ('POST', 'protocols/nvme/subsystems/None/hosts', SRR['generic_error']),
+ ('POST', 'protocols/nvme/subsystem-maps', SRR['generic_error']),
+ ('DELETE', 'protocols/nvme/subsystems/None/hosts/host', SRR['generic_error']),
+ ('DELETE', 'protocols/nvme/subsystem-maps/None/None', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ error = rest_error_message('Error fetching subsystem host info for vserver: ansibleSVM', 'protocols/nvme/subsystems/None/hosts')
+ assert error in expect_and_capture_ansible_exception(my_module_object.get_subsystem_host_map_rest, 'fail', 'hosts')['msg']
+ error = rest_error_message('Error fetching subsystem map info for vserver: ansibleSVM', 'protocols/nvme/subsystem-maps')
+ assert error in expect_and_capture_ansible_exception(my_module_object.get_subsystem_host_map_rest, 'fail', 'paths')['msg']
+ error = rest_error_message('Error adding [] for subsystem subsystem1', 'protocols/nvme/subsystems/None/hosts')
+ assert error in expect_and_capture_ansible_exception(my_module_object.add_subsystem_host_map_rest, 'fail', [], 'hosts')['msg']
+ error = rest_error_message('Error adding path for subsystem subsystem1', 'protocols/nvme/subsystem-maps')
+ assert error in expect_and_capture_ansible_exception(my_module_object.add_subsystem_host_map_rest, 'fail', ['path'], 'paths')['msg']
+ error = rest_error_message('Error removing host for subsystem subsystem1', 'protocols/nvme/subsystems/None/hosts/host')
+ assert error in expect_and_capture_ansible_exception(my_module_object.remove_subsystem_host_map_rest, 'fail', ['host'], 'hosts')['msg']
+ error = rest_error_message('Error removing path for subsystem subsystem1', 'protocols/nvme/subsystem-maps/None/None')
+ assert error in expect_and_capture_ansible_exception(my_module_object.remove_subsystem_host_map_rest, 'fail', ['path'], 'paths')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_object_store.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_object_store.py
new file mode 100644
index 000000000..91430883c
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_object_store.py
@@ -0,0 +1,538 @@
+# (c) 2019, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_object_store """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_object_store \
+ import NetAppOntapObjectStoreConfig as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_uuid': (200, {'records': [{'uuid': 'ansible'}]}, None),
+ 'get_object_store': (200,
+ {'uuid': 'ansible',
+ 'name': 'ansible',
+ 'provider_type': 'abc',
+ 'access_key': 'abc',
+ 'owner': 'fabricpool'
+ }, None)
+}
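+# These tuples are fed to OntapRestAPI.send_request via the mock side_effect in the order listed
+# in each test; 'end_of_sequence' (a 500 error) acts as a guard so that any REST call beyond what
+# the test registered should cause a visible failure.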
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ print('IN:', xml.to_string())
+ if self.type == 'object_store':
+ xml = self.build_object_store_info()
+ elif self.type == 'object_store_not_found':
+ self.type = 'object_store'
+ raise netapp_utils.zapi.NaApiError(code='15661', message="This exception is from the unit test - 15661")
+ elif self.type == 'object_store_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ print('OUT:', xml.to_string())
+ return xml
+
+ @staticmethod
+ def build_object_store_info():
+ ''' build xml data for object store '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'attributes':
+ {'aggr-object-store-config-info':
+ {'object-store-name': 'ansible',
+ 'provider-type': 'abc',
+ 'access-key': 'abc',
+ 'server': 'abc',
+ 's3-name': 'abc',
+ 'ssl-enabled': 'true',
+ 'port': '1234',
+ 'is-certificate-validation-enabled': 'true'}
+ }
+ }
+ xml.translate_struct(data)
+ print(xml.to_string())
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ # whether to use a mock or a simulator
+ self.onbox = False
+
+ def set_default_args(self):
+ if self.onbox:
+ hostname = '10.10.10.10'
+ username = 'admin'
+ password = 'password'
+ name = 'ansible'
+ else:
+ hostname = 'hostname'
+ username = 'username'
+ password = 'password'
+ name = 'ansible'
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'name': name,
+ 'feature_flags': {'no_cserver_ems': True}
+ })
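+ # Note: 'no_cserver_ems' presumably suppresses the EMS logging call to the cluster vserver,
+ # so the mocked connection does not need to serve an extra ZAPI response for it.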
+
+ def call_command(self, module_args):
+ ''' utility function to call apply '''
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ # mock the connection
+ my_obj.server = MockONTAPConnection('object_store')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ return exc.value.args[0]['changed']
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_negative_netapp_lib(self, mock_request, mock_has_netapp_lib):
+ ''' fetching details of object store '''
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ mock_has_netapp_lib.return_value = False
+ set_module_args(self.set_default_args())
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_main()
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_ensure_object_store_get_called(self, mock_request):
+ ''' fetching details of object store '''
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = self.server
+ assert my_obj.get_aggr_object_store() is None
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_ensure_get_called_existing(self, mock_request):
+ ''' test for existing object store'''
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection(kind='object_store')
+ object_store = my_obj.get_aggr_object_store()
+ assert object_store
+ assert 'name' in object_store
+ assert object_store['name'] == 'ansible'
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_object_store_create(self, mock_request):
+ ''' test for creating object store'''
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ module_args = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc',
+ 'access_key': 'abc',
+ 'secret_password': 'abc',
+ 'port': 1234,
+ 'certificate_validation_enabled': True,
+ 'ssl_enabled': True
+ }
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.onbox:
+ # mock the connection
+ my_obj.server = MockONTAPConnection(kind='object_store_not_found')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_object_store_negative_create_bad_owner(self, mock_request):
+ ''' test for creating object store'''
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ module_args = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc',
+ 'access_key': 'abc',
+ 'secret_password': 'abc',
+ 'port': 1234,
+ 'certificate_validation_enabled': True,
+ 'ssl_enabled': True,
+ 'owner': 'snapmirror'
+ }
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ print(exc.value.args[0])
+ assert exc.value.args[0]['msg'] == 'Error: unsupported value for owner: snapmirror when using ZAPI.'
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_object_store_delete(self, mock_request):
+ ''' test for deleting object store'''
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ module_args = {
+ 'state': 'absent',
+ }
+ changed = self.call_command(module_args)
+ assert changed
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_negative_object_store_modify(self, mock_request):
+ ''' test for modifying object store'''
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ module_args = {
+ 'provider_type': 'abc',
+ 'server': 'abc2',
+ 'container': 'abc',
+ 'access_key': 'abc2',
+ 'secret_password': 'abc'
+ }
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection(kind='object_store')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = 'Error - modify is not supported with ZAPI'
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ set_module_args(self.set_default_args())
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['msg'] == 'Error calling: cloud/targets: got %s.' % SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_create(self, mock_request):
+ data = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc',
+ 'access_key': 'abc',
+ 'secret_password': 'abc',
+ 'port': 1234,
+ 'certificate_validation_enabled': True,
+ 'ssl_enabled': True
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_negative_create_missing_parameter(self, mock_request):
+ data = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc',
+ 'secret_password': 'abc',
+ 'port': 1234,
+ 'certificate_validation_enabled': True,
+ 'ssl_enabled': True
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = 'Error provisioning object store ansible: one of the following parameters are missing'
+ assert msg in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_negative_create_api_error(self, mock_request):
+ data = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc',
+ 'access_key': 'abc',
+ 'secret_password': 'abc',
+ 'port': 1234,
+ 'certificate_validation_enabled': True,
+ 'ssl_enabled': True
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_main()
+ assert exc.value.args[0]['msg'] == 'Error calling: cloud/targets: got %s.' % SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_modify(self, mock_request):
+ data = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc2',
+ 'access_key': 'abc2',
+ 'secret_password': 'abc'
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_object_store'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_negative_modify_rest_error(self, mock_request):
+ data = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc2',
+ 'access_key': 'abc2',
+ 'secret_password': 'abc'
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_object_store'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['msg'] == 'Error calling: cloud/targets/ansible: got %s.' % SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_negative_modify_owner(self, mock_request):
+ data = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc2',
+ 'access_key': 'abc2',
+ 'secret_password': 'abc',
+ 'owner': 'snapmirror'
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_object_store'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['msg'] == 'Error modifying object store, owner cannot be changed. Found: snapmirror.'
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_modify_password(self, mock_request):
+ data = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc',
+ 'access_key': 'abc',
+ 'secret_password': 'abc2',
+ 'change_password': True
+
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_object_store'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert exc.value.args[0]['changed']
+ assert 'secret_password' in exc.value.args[0]['modify']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_idempotent(self, mock_request):
+ data = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc',
+ 'access_key': 'abc',
+ 'secret_password': 'abc2'
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_object_store'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print(mock_request.mock_calls)
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_delete(self, mock_request):
+ data = {
+ 'state': 'absent',
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['get_object_store'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_negative_delete(self, mock_request):
+ data = {
+ 'state': 'absent',
+ }
+ data.update(self.set_default_args())
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_object_store'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['msg'] == 'Error calling: cloud/targets/ansible: got %s.' % SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_if_all_methods_catch_exception(self, mock_request):
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ module_args = {
+ 'provider_type': 'abc',
+ 'server': 'abc',
+ 'container': 'abc',
+ 'access_key': 'abc',
+ 'secret_password': 'abc'
+ }
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('object_store_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.get_aggr_object_store()
+ assert '' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_aggr_object_store(None)
+ assert 'Error provisioning object store config ' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_aggr_object_store()
+ assert 'Error removing object store config ' in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_partitions.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_partitions.py
new file mode 100644
index 000000000..975ffb161
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_partitions.py
@@ -0,0 +1,515 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_partitions '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_partitions \
+ import NetAppOntapPartitions as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def default_args():
+ args = {
+ 'disk_type': 'SAS',
+ 'partitioning_method': 'root_data',
+ 'partition_type': 'data',
+ 'partition_count': 13,
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'node': 'node1',
+ 'use_rest': 'always'
+ }
+ return args
+
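+# Judging by the scenarios below, partition_count is treated as the target total number of
+# partitions of the given type that the node should own after the module runs; the tests vary it
+# to exercise the assign, steal-from-partner and unassign code paths.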
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'owned_partitions_record': (200, {
+ "records": [
+ {
+ "partition": "1.0.0.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "fas2552-rtp-13-02",
+ "owner_node_name": "fas2552-rtp-13-02"
+ },
+ {
+ "partition": "1.0.2.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "fas2552-rtp-13-02",
+ "owner_node_name": "fas2552-rtp-13-02"
+ },
+ {
+ "partition": "1.0.4.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "fas2552-rtp-13-02",
+ "owner_node_name": "fas2552-rtp-13-02"
+ }
+ ],
+ "num_records": 3
+ }, None),
+
+ 'unassigned_partitions_record': (200, {
+ "records": [
+ {
+ "partition": "1.0.25.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "fas2552-rtp-13-02",
+ "owner_node_name": "fas2552-rtp-13-02"
+ },
+ {
+ "partition": "1.0.27.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "fas2552-rtp-13-02",
+ "owner_node_name": "fas2552-rtp-13-02"
+ },
+ ],
+ "num_records": 2
+ }, None),
+
+ 'unassigned_disks_record': (200, {
+ "records": [
+ {
+ 'name': '1.0.27',
+ 'type': 'sas',
+ 'container_type': 'unassigned',
+ 'home_node': {'name': 'node1'}},
+ {
+ 'name': '1.0.28',
+ 'type': 'sas',
+ 'container_type': 'unassigned',
+ 'home_node': {'name': 'node1'}}
+ ],
+ 'num_records': 2}, None),
+
+ 'home_spare_disk_info_record': (200, {
+ 'records': [],
+ 'num_records': 2}, None),
+
+ 'spare_partitions_record': (200, {
+ "records": [
+ {
+ "partition": "1.0.0.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "fas2552-rtp-13-02",
+ "owner_node_name": "fas2552-rtp-13-02"
+ },
+ {
+ "partition": "1.0.1.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "fas2552-rtp-13-02",
+ "owner_node_name": "fas2552-rtp-13-02"
+ }
+ ], 'num_records': 2
+ }, None),
+
+ 'partner_spare_partitions_record': (200, {
+ "records": [
+ {
+ "partition": "1.0.1.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "node2",
+ "owner_node_name": "node2"
+ },
+ {
+ "partition": "1.0.3.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "node2",
+ "owner_node_name": "node2"
+ },
+ {
+ "partition": "1.0.5.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "node2",
+ "owner_node_name": "node2"
+ },
+ {
+ "partition": "1.0.23.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data",
+ "is_root": False,
+ "disk_type": "sas",
+ "home_node_name": "node2",
+ "owner_node_name": "node2"
+ }
+ ], "num_records": 4
+ }, None),
+
+ 'partner_node_name_record': (200, {
+ 'records': [
+ {
+ 'uuid': 'c345c182-a6a0-11eb-af7b-00a0984839de',
+ 'name': 'node2',
+ 'ha': {
+ 'partners': [
+ {'name': 'node1'}
+ ]
+ }
+ }
+ ], 'num_records': 1
+ }, None),
+
+ 'partner_spare_disks_record': (200, {
+ 'records': [
+ {
+ 'name': '1.0.22',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ },
+ {
+ 'name': '1.0.20',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ },
+ {
+ 'name': '1.0.18',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ },
+ {
+ 'name': '1.0.16',
+ 'type': 'sas',
+ 'container_type': 'spare',
+ 'home_node': {'name': 'node2'}
+ }
+ ], 'num_records': 4
+ }, None),
+
+ 'adp2_owned_partitions_record': (200, {
+ "records": [
+ {
+ "partition": "1.0.0.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data1_data2",
+ "is_root": False,
+ "disk_type": "ssd",
+ "home_node_name": "aff300-rtp-2b",
+ "owner_node_name": "aff300-rtp-2b"
+ },
+ {
+ "partition": "1.0.1.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data1_data2",
+ "is_root": False,
+ "disk_type": "ssd",
+ "home_node_name": "aff300-rtp-2b",
+ "owner_node_name": "aff300-rtp-2b"
+ },
+ {
+ "partition": "1.0.23.P1",
+ "container_type": "spare",
+ "partitioning_method": "root_data1_data2",
+ "is_root": False,
+ "disk_type": "ssd",
+ "home_node_name": "aff300-rtp-2b",
+ "owner_node_name": "aff300-rtp-2b"
+ }
+ ], "num_records": 3
+ }, None),
+}
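+# The canned records above model the inventory the module inspects: partitions already owned by
+# the node, unassigned partitions/disks, spares on the node, and the HA partner's spares that can
+# be "stolen" when the requested partition_count exceeds what is locally available.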
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' a missing required argument (hostname) is reported as an error '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+# get unassigned partitions
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_assign_unassigned_disks(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Steal disks from partner node and assign them to the requested node '''
+ args = dict(default_args())
+ args['partition_count'] = 5
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['owned_partitions_record'],
+ SRR['home_spare_disk_info_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['unassigned_disks_record'],
+ SRR['partner_node_name_record'],
+ SRR['partner_spare_partitions_record'],
+ SRR['partner_spare_disks_record'],
+ SRR['empty_good'], # assign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 6
+
+
+# assign unassigned partitions + steal 2 partner spare partitions
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_assign_unassigned_and_partner_spare_disks(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Steal disks from partner node and assign them to the requested node '''
+ args = dict(default_args())
+ args['partition_count'] = 7
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['owned_partitions_record'],
+ SRR['home_spare_disk_info_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['unassigned_disks_record'],
+ SRR['partner_node_name_record'],
+ SRR['partner_spare_partitions_record'],
+ SRR['partner_spare_disks_record'],
+ SRR['empty_good'], # unassign
+ SRR['empty_good'], # assign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 9
+
+
+# assign unassigned partitions + steal 2 partner spare partitions
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_assign_unassigned_and_partner_spare_partitions_and_disks(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Steal disks from partner node and assign them to the requested node '''
+ args = dict(default_args())
+ args['partition_count'] = 6
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['owned_partitions_record'],
+ SRR['home_spare_disk_info_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['unassigned_disks_record'],
+ SRR['partner_node_name_record'],
+ SRR['partner_spare_partitions_record'],
+ SRR['partner_spare_disks_record'],
+ SRR['empty_good'], # unassign
+ SRR['empty_good'], # assign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 8
+
+
+# Should unassign partitions
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_unassign(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Unassign partitions when the node owns more than the requested partition_count '''
+ args = dict(default_args())
+ args['partition_count'] = 2 # must be less than the number of partitions currently assigned to the node
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['owned_partitions_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['spare_partitions_record'],
+ SRR['empty_good'], # unassign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 5
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' partition_count matches the current assignment, do nothing '''
+ args = dict(default_args())
+ args['partition_count'] = 3
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['owned_partitions_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+# ADP2
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_assign_unassigned_disks_adp2(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Steal disks from partner node and assign them to the requested node '''
+ args = dict(default_args())
+ args['partitioning_method'] = 'root_data1_data2'
+ args['partition_type'] = 'data1'
+ args['partition_count'] = 5 # adjust depending on the number of data1 partitions
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['adp2_owned_partitions_record'],
+ SRR['home_spare_disk_info_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['unassigned_disks_record'],
+ SRR['partner_node_name_record'],
+ SRR['partner_spare_partitions_record'],
+ SRR['partner_spare_disks_record'],
+ SRR['empty_good'], # assign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 6
+
+
+# assign unassigned partitions + steal 2 partner spare partitions
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_assign_unassigned_and_partner_spare_disks_adp2(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Steal disks from partner node and assign them to the requested node '''
+ args = dict(default_args())
+ args['partitioning_method'] = 'root_data1_data2'
+ args['partition_type'] = 'data1'
+ args['partition_count'] = 7 # data1 partitions
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['adp2_owned_partitions_record'],
+ SRR['home_spare_disk_info_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['unassigned_disks_record'],
+ SRR['partner_node_name_record'],
+ SRR['partner_spare_partitions_record'],
+ SRR['partner_spare_disks_record'],
+ SRR['empty_good'], # unassign
+ SRR['empty_good'], # assign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 9
+
+
+# assign unassigned partitions + steal 2 partner spare partitions
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_assign_unassigned_and_partner_spare_partitions_and_disks_adp2(mock_request, patch_ansible):
+ ''' Steal disks from partner node and assign them to the requested node '''
+ args = dict(default_args())
+ args['partitioning_method'] = 'root_data1_data2'
+ args['partition_type'] = 'data1'
+ args['partition_count'] = 6 # adjust this depending on the number of data1 partitions
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['adp2_owned_partitions_record'],
+ SRR['home_spare_disk_info_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['unassigned_disks_record'],
+ SRR['partner_node_name_record'],
+ SRR['partner_spare_partitions_record'],
+ SRR['partner_spare_disks_record'],
+ SRR['empty_good'], # unassign
+ SRR['empty_good'], # assign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 8
+
+
+# Should unassign partitions
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_unassign_adp2(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Unassign partitions so the node matches the requested partition_count '''
+ args = dict(default_args())
+ args['partitioning_method'] = 'root_data1_data2'
+ args['partition_type'] = 'data1'
+ args['partition_count'] = 2 # must be less than the number of partitions currently assigned to the node
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['adp2_owned_partitions_record'],
+ SRR['unassigned_partitions_record'],
+ SRR['spare_partitions_record'],
+ SRR['empty_good'], # unassign
+ # SRR['empty_good'], # assign
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 5
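The REST tests above all share one convention: every SRR entry is a (status_code, json_body, error) triple, and patching OntapRestAPI.send_request with a side_effect list makes each REST call consume the next entry in order, with the 500 'end_of_sequence' sentinel catching any unexpected extra call. A minimal sketch of that pattern, kept outside the collection (the names below are illustrative only, not the module's real code):

    from unittest.mock import MagicMock

    SRR = {
        'is_rest': (200, {'version': {'generation': 9, 'major': 9, 'minor': 0}}, None),
        'empty_good': (200, {}, None),
        'end_of_sequence': (500, None, "Unexpected call to send_request"),
    }

    send_request = MagicMock()
    send_request.side_effect = [SRR['is_rest'], SRR['empty_good'], SRR['end_of_sequence']]

    status, body, error = send_request('GET', 'cluster')          # first call -> 'is_rest'
    assert (status, error) == (200, None)
    status, body, error = send_request('POST', 'some/endpoint')   # second call -> 'empty_good'
    assert status == 200
    # a third, unexpected call would return the 500 sentinel, which the module under test reports as a failure
    assert len(send_request.mock_calls) == 2
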
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ports.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ports.py
new file mode 100644
index 000000000..8256024ae
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ports.py
@@ -0,0 +1,864 @@
+# (c) 2019, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_ports'''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings, set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports \
+ import NetAppOntapPorts as port_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.type = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ self.xml_out = xml
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def mock_args(self, choice):
+ if choice == 'broadcast_domain':
+ return {
+ 'names': ['test_port_1', 'test_port_2'],
+ 'resource_name': 'test_domain',
+ 'resource_type': 'broadcast_domain',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+ }
+ elif choice == 'portset':
+ return {
+ 'names': ['test_lif'],
+ 'resource_name': 'test_portset',
+ 'resource_type': 'portset',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'vserver': 'test_vserver',
+ 'use_rest': 'never'
+ }
+
+ def get_port_mock_object(self):
+ """
+ Helper method to return an na_ontap_ports object
+ """
+ port_obj = port_module()
+ port_obj.server = MockONTAPConnection()
+ return port_obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.add_broadcast_domain_ports')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.get_broadcast_domain_ports')
+ def test_successfully_add_broadcast_domain_ports(self, get_broadcast_domain_ports, add_broadcast_domain_ports):
+ ''' Test successful add broadcast domain ports '''
+ data = self.mock_args('broadcast_domain')
+ set_module_args(data)
+ get_broadcast_domain_ports.side_effect = [
+ []
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.add_broadcast_domain_ports')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.get_broadcast_domain_ports')
+ def test_add_broadcast_domain_ports_idempotency(self, get_broadcast_domain_ports, add_broadcast_domain_ports):
+ ''' Test add broadcast domain ports idempotency '''
+ data = self.mock_args('broadcast_domain')
+ set_module_args(data)
+ get_broadcast_domain_ports.side_effect = [
+ ['test_port_1', 'test_port_2']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.add_portset_ports')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.portset_get')
+ def test_successfully_add_portset_ports(self, portset_get, add_portset_ports):
+ ''' Test successful add portset ports '''
+ data = self.mock_args('portset')
+ set_module_args(data)
+ portset_get.side_effect = [
+ []
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.add_portset_ports')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.portset_get')
+ def test_add_portset_ports_idempotency(self, portset_get, add_portset_ports):
+ ''' Test add portset ports idempotency '''
+ data = self.mock_args('portset')
+ set_module_args(data)
+ portset_get.side_effect = [
+ ['test_lif']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.add_broadcast_domain_ports')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.get_broadcast_domain_ports')
+ def test_successfully_remove_broadcast_domain_ports(self, get_broadcast_domain_ports, add_broadcast_domain_ports):
+ ''' Test successful remove broadcast domain ports '''
+ data = self.mock_args('broadcast_domain')
+ data['state'] = 'absent'
+ set_module_args(data)
+ get_broadcast_domain_ports.side_effect = [
+ ['test_port_1', 'test_port_2']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.add_portset_ports')
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_ports.NetAppOntapPorts.portset_get')
+ def test_remove_add_portset_ports(self, portset_get, add_portset_ports):
+ ''' Test successful remove portset ports '''
+ data = self.mock_args('portset')
+ data['state'] = 'absent'
+ set_module_args(data)
+ portset_get.side_effect = [
+ ['test_lif']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_port_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+
+def default_args(choice=None, resource_name=None, portset_type=None):
+ args = {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ if choice == 'broadcast_domain':
+ args['resource_type'] = "broadcast_domain"
+ args['resource_name'] = "domain2"
+ args['ipspace'] = "ip1"
+ args['names'] = ["mohan9cluster2-01:e0b", "mohan9cluster2-01:e0d"]
+ return args
+ if choice == 'portset':
+ args['portset_type'] = portset_type
+ args['resource_name'] = resource_name
+ args['resource_type'] = 'portset'
+ args['vserver'] = 'svm3'
+ return args
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_6': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_9_7': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'port_detail_e0d': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ 'name': 'e0d',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': 'ea670505-2ab3-11ec-aa30-005056b3dfc8'
+ }]
+ }, None),
+ 'port_detail_e0a': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ 'name': 'e0a',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': 'ea63420b-2ab3-11ec-aa30-005056b3dfc8'
+ }]
+ }, None),
+ 'port_detail_e0b': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ 'name': 'e0b',
+ 'node': {'name': 'mohan9cluster2-01'},
+ 'uuid': 'ea64c0f2-2ab3-11ec-aa30-005056b3dfc8'
+ }]
+ }, None),
+ 'broadcast_domain_record': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ "uuid": "4475a2c8-f8a0-11e8-8d33-005056bb986f",
+ "name": "domain1",
+ "ipspace": {"name": "ip1"},
+ "ports": [
+ {
+ "uuid": "ea63420b-2ab3-11ec-aa30-005056b3dfc8",
+ "name": "e0a",
+ "node": {
+ "name": "mohan9cluster2-01"
+ }
+ },
+ {
+ "uuid": "ea64c0f2-2ab3-11ec-aa30-005056b3dfc8",
+ "name": "e0b",
+ "node": {
+ "name": "mohan9cluster2-01"
+ }
+ },
+ {
+ "uuid": "ea670505-2ab3-11ec-aa30-005056b3dfc8",
+ "name": "e0d",
+ "node": {
+ "name": "mohan9cluster2-01"
+ }
+ }
+ ],
+ "mtu": 9000
+ }]
+ }, None),
+ 'broadcast_domain_record1': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ "uuid": "4475a2c8-f8a0-11e8-8d33-005056bb986f",
+ "name": "domain2",
+ "ipspace": {"name": "ip1"},
+ "ports": [
+ {
+ "uuid": "ea63420b-2ab3-11ec-aa30-005056b3dfc8",
+ "name": "e0a",
+ "node": {
+ "name": "mohan9cluster2-01"
+ }
+ }
+ ],
+ "mtu": 9000
+ }]
+ }, None),
+ 'iscsips': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ "uuid": "52e31a9d-72e2-11ec-95ea-005056b3b297",
+ "svm": {"name": "svm3"},
+ "name": "iscsips"
+ }]
+ }, None),
+ 'iscsips_updated': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ "uuid": "52e31a9d-72e2-ec11-95ea-005056b3b298",
+ "svm": {"name": "svm3"},
+ "name": "iscsips_updated",
+ "interfaces": [
+ {
+ "uuid": "6a82e94a-72da-11ec-95ea-005056b3b297",
+ "ip": {"name": "lif_svm3_856"}
+ }]
+ }]
+ }, None),
+ 'mixedps': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ "uuid": "ba02916a-72da-11ec-95ea-005056b3b297",
+ "svm": {
+ "name": "svm3"
+ },
+ "name": "mixedps",
+ "interfaces": [
+ {
+ "uuid": "2c373289-728f-11ec-95ea-005056b3b297",
+ "fc": {"name": "lif_svm3_681_2"}
+ },
+ {
+ "uuid": "d229cc03-7797-11ec-95ea-005056b3b297",
+ "fc": {"name": "lif_svm3_681_1_1"}
+ },
+ {
+ "uuid": "d24e03c6-7797-11ec-95ea-005056b3b297",
+ "fc": {"name": "lif_svm3_681_1_2"}
+ }]
+ }]
+ }, None),
+ 'mixedps_updated': (200, {
+ "num_records": 1,
+ "records": [
+ {
+ "uuid": "ba02916a-72da-11ec-95ea-005056b3b297",
+ "svm": {
+ "name": "svm3"
+ },
+ "name": "mixedps_updated",
+ "interfaces": [
+ {
+ "uuid": "6a82e94a-72da-11ec-95ea-005056b3b297",
+ "ip": {"name": "lif_svm3_856"}
+ },
+ {
+ "uuid": "2bf30606-728f-11ec-95ea-005056b3b297",
+ "fc": {"name": "lif_svm3_681_1"}
+ },
+ {
+ "uuid": "2c373289-728f-11ec-95ea-005056b3b297",
+ "fc": {"name": "lif_svm3_681_2"}
+ },
+ {
+ "uuid": "d229cc03-7797-11ec-95ea-005056b3b297",
+ "fc": {"name": "lif_svm3_681_1_1"}
+ },
+ {
+ "uuid": "d24e03c6-7797-11ec-95ea-005056b3b297",
+ "fc": {"name": "lif_svm3_681_1_2"}
+ }]
+ }]
+ }, None),
+ 'lif_svm3_681_1_1': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "d229cc03-7797-11ec-95ea-005056b3b297"}]
+ }, None),
+ 'lif_svm3_681_1_2': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "d24e03c6-7797-11ec-95ea-005056b3b297"}]
+ }, None),
+ 'lif_svm3_681_1': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "2bf30606-728f-11ec-95ea-005056b3b297"}]
+ }, None),
+ 'lif_svm3_681_2': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "2c373289-728f-11ec-95ea-005056b3b297"}]
+ }, None),
+ 'lif_svm3_856': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "6a82e94a-72da-11ec-95ea-005056b3b297"}]
+ }, None)
+}
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ port_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments:'
+ assert msg in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_add_broadcast_domain_port_rest(mock_request, patch_ansible):
+ ''' test add broadcast domain port'''
+ args = dict(default_args('broadcast_domain'))
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['port_detail_e0d'],
+ SRR['broadcast_domain_record1'], # get
+ SRR['empty_good'], # add e0b
+ SRR['empty_good'], # add e0d
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_add_broadcast_domain_port_rest_idempotent(mock_request, patch_ansible):
+ ''' test add broadcast domain port'''
+ args = dict(default_args('broadcast_domain'))
+ args['resource_name'] = "domain2"
+ args['names'] = ["mohan9cluster2-01:e0a"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0a'],
+ SRR['broadcast_domain_record1'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_remove_broadcast_domain_port_rest(mock_request, patch_ansible):
+ ''' test remove broadcast domain port'''
+ args = dict(default_args('broadcast_domain'))
+ args['resource_name'] = "domain1"
+ args['names'] = ["mohan9cluster2-01:e0b", "mohan9cluster2-01:e0d"]
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['port_detail_e0d'],
+ SRR['broadcast_domain_record'], # get
+ SRR['empty_good'], # remove e0b and e0d
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_remove_broadcast_domain_port_rest_idempotent(mock_request, patch_ansible):
+ ''' test remove broadcast domain port'''
+ args = dict(default_args('broadcast_domain'))
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['port_detail_e0d'],
+ SRR['broadcast_domain_record1'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_get_ports_rest(mock_request, patch_ansible):
+ ''' test get port '''
+ args = dict(default_args('broadcast_domain'))
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['generic_error'], # Error in getting ports
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj = port_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'calling: network/ethernet/ports: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_get_broadcast_domain_ports_rest(mock_request, patch_ansible):
+ ''' test get broadcast domain '''
+ args = dict(default_args('broadcast_domain'))
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['port_detail_e0d'],
+ SRR['generic_error'], # Error in getting broadcast domain ports
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'calling: network/ethernet/broadcast-domains: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_add_broadcast_domain_ports_rest(mock_request, patch_ansible):
+ ''' test add broadcast domain ports '''
+ args = dict(default_args('broadcast_domain'))
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['port_detail_e0d'],
+ SRR['broadcast_domain_record1'], # get
+ SRR['generic_error'], # Error in adding ports
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_remove_broadcast_domain_ports_rest(mock_request, patch_ansible):
+ ''' test remove broadcast domain ports '''
+ args = dict(default_args('broadcast_domain'))
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['port_detail_e0b'],
+ SRR['port_detail_e0d'],
+ SRR['broadcast_domain_record'], # get
+ SRR['generic_error'], # Error in removing ports
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error removing ports: calling: private/cli/network/port/broadcast-domain/remove-ports: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_invalid_ports_rest(mock_request, patch_ansible):
+ ''' test error when a port name is not in node_name:port_name format '''
+ args = dict(default_args('broadcast_domain'))
+ args['names'] = ["mohan9cluster2-01e0b"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['generic_error']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj = port_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error: Invalid value specified for port: mohan9cluster2-01e0b, provide port name as node_name:port_name'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_broadcast_domain_missing_ports_rest(mock_request, patch_ansible):
+ ''' test get ports '''
+ args = dict(default_args('broadcast_domain'))
+ args['names'] = ["mohan9cluster2-01:e0l"]
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj = port_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error: ports: mohan9cluster2-01:e0l not found'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_add_portset_port_iscsi_rest(mock_request, patch_ansible):
+ ''' test add portset port'''
+ args = dict(default_args('portset', 'iscsips', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['iscsips'], # get portset
+ SRR['empty_good'], # add lif_svm3_856
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_add_portset_port_iscsi_rest_idempotent(mock_request, patch_ansible):
+ ''' test add portset port'''
+ args = dict(default_args('portset', 'iscsips_updated', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['iscsips_updated'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_remove_portset_port_iscsi_rest(mock_request, patch_ansible):
+ ''' test remove portset port'''
+ args = dict(default_args('portset', 'iscsips_updated', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['iscsips_updated'],
+ SRR['empty_good'], # remove lif_svm3_856
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_add_portset_port_mixed_rest(mock_request, patch_ansible):
+ ''' test add portset port'''
+ args = dict(default_args('portset', 'mixedps', 'mixed'))
+ args['names'] = ['lif_svm3_856', 'lif_svm3_681_1']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'], # get lif_svm3_856 in ip
+ SRR['zero_record'], # lif_svm3_856 not found in fc
+ SRR['zero_record'], # lif_svm3_681_1 not found in ip
+ SRR['lif_svm3_681_1'], # get lif_svm3_681_1 in fc
+ SRR['mixedps'], # get portset
+ SRR['empty_good'], # Add both ip and fc to mixed portset
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_get_portset_fetching_rest(mock_request, patch_ansible):
+ ''' test get port '''
+ args = dict(default_args('portset', 'iscsips_updated', 'mixed'))
+ args['names'] = ['lif_svm3_856']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['generic_error'], # Error in getting portset
+ SRR['generic_error']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj = port_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error fetching lifs details for lif_svm3_856: calling: network/ip/interfaces: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_get_portset_fetching_portset_ip_rest(mock_request, patch_ansible):
+ ''' test get port ip'''
+ args = dict(default_args('portset', 'iscsips_updated', 'ip'))
+ args['names'] = ['lif_svm3_856']
+ del args['portset_type']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['generic_error'],
+ SRR['iscsips_updated'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj = port_module()
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_get_portset_fetching_portset_fcp_rest(mock_request, patch_ansible):
+ ''' test get port fcp'''
+ args = dict(default_args('portset', 'mixedps_updated', 'fcp'))
+ args['names'] = ['lif_svm3_681_1']
+ del args['portset_type']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['generic_error'],
+ SRR['lif_svm3_681_1'],
+ SRR['mixedps_updated'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj = port_module()
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_get_portset_rest(mock_request, patch_ansible):
+ ''' test get portset '''
+ args = dict(default_args('portset', 'iscsips_updated', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['generic_error'], # Error in getting portset
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'calling: protocols/san/portsets: got Expected error'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_get_portset_error_rest(mock_request, patch_ansible):
+ ''' test get portset '''
+ args = dict(default_args('portset', 'iscsips_updated', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['zero_record'],
+ SRR['generic_error'], # Error in getting portset
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: Portset 'iscsips_updated' does not exist"
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_get_portset_missing_rest(mock_request, patch_ansible):
+ ''' test get portset '''
+ args = dict(default_args('portset', 'iscsips_updated', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['zero_record'],
+ SRR['generic_error'], # Error in getting portset
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj = port_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: lifs: lif_svm3_856 of type iscsi not found in vserver svm3"
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_get_portset_missing_state_absent_rest(mock_request, patch_ansible):
+ ''' test no change when portset does not exist and state is absent '''
+ args = dict(default_args('portset', 'iscsips_updated', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['zero_record'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_add_portset_ports_rest(mock_request, patch_ansible):
+ ''' test add portset ports '''
+ args = dict(default_args('portset', 'iscsips', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['iscsips'],
+ SRR['generic_error'], # Error in adding ports
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'calling: protocols/san/portsets/52e31a9d-72e2-11ec-95ea-005056b3b297/interfaces: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_module_error_remove_portset_ports_rest(mock_request, patch_ansible):
+ ''' test error removing portset ports '''
+ args = dict(default_args('portset', 'iscsips_updated', 'iscsi'))
+ args['names'] = ['lif_svm3_856']
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'], # get version
+ SRR['lif_svm3_856'],
+ SRR['iscsips_updated'],
+ SRR['generic_error'], # Error in removing ports
+ ]
+ my_obj = port_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'calling: protocols/san/portsets/52e31a9d-72e2-ec11-95ea-005056b3b298/interfaces/6a82e94a-72da-11ec-95ea-005056b3b297: got Expected error.'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
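The pytest.raises(AnsibleExitJson) / AnsibleFailJson idiom used in this file relies on the helpers in tests/unit/plugins/module_utils/ansible_mocks.py patching the module's exit_json/fail_json so that they raise instead of calling sys.exit(), which is how the result dict surfaces as exc.value.args[0]. A simplified sketch of that mechanism, following the standard Ansible unit-test pattern (the real helpers may differ in detail):

    import pytest

    class AnsibleExitJson(Exception):
        """raised in place of sys.exit() when the module calls exit_json()"""

    class AnsibleFailJson(Exception):
        """raised in place of sys.exit() when the module calls fail_json()"""

    def exit_json(*args, **kwargs):
        kwargs.setdefault('changed', False)
        raise AnsibleExitJson(kwargs)

    def fail_json(*args, **kwargs):
        kwargs['failed'] = True
        raise AnsibleFailJson(kwargs)

    # with AnsibleModule.exit_json/fail_json patched to the functions above,
    # a test captures the module result from the raised exception
    def fake_apply():
        exit_json(changed=True)

    with pytest.raises(AnsibleExitJson) as exc:
        fake_apply()
    assert exc.value.args[0]['changed'] is True
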
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_portset.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_portset.py
new file mode 100644
index 000000000..2e68e58c9
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_portset.py
@@ -0,0 +1,390 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_portset'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_portset \
+ import NetAppONTAPPortset as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'state': 'present',
+ 'name': 'test',
+ 'type': 'mixed',
+ 'vserver': 'ansible_test',
+ 'ports': ['a1', 'a2'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+}
+
+
+portset_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'portset-info': {
+ 'portset-name': 'test',
+ 'vserver': 'ansible_test',
+ 'portset-type': 'mixed',
+ 'portset-port-total': '2',
+ 'portset-port-info': [
+ {'portset-port-name': 'a1'},
+ {'portset-port-name': 'a2'}
+ ]
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'portset_info': build_zapi_response(portset_info)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "name", "vserver"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_ensure_portset_get_called():
+ ''' a more interesting test '''
+ register_responses([
+ ('portset-get-iter', ZRR['empty'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ portset = my_obj.portset_get()
+ assert portset is None
+
+
+def test_create_portset():
+ ''' Test successful create '''
+ register_responses([
+ ('portset-get-iter', ZRR['empty']),
+ ('portset-create', ZRR['success']),
+ ('portset-add', ZRR['success']),
+ ('portset-add', ZRR['success'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_modify_ports():
+ ''' Test modify_portset method '''
+ register_responses([
+ ('portset-get-iter', ZRR['portset_info']),
+ ('portset-add', ZRR['success']),
+ ('portset-add', ZRR['success']),
+ ('portset-remove', ZRR['success']),
+ ('portset-remove', ZRR['success'])
+ ])
+ args = {'ports': ['l1', 'l2']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_portset():
+ ''' Test successful delete '''
+ register_responses([
+ ('portset-get-iter', ZRR['portset_info']),
+ ('portset-destroy', ZRR['success'])
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_error_type_create():
+ register_responses([
+ ('portset-get-iter', ZRR['empty'])
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['type']
+ error = 'Error: Missing required parameter for create (type)'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('portset-get-iter', ZRR['error']),
+ ('portset-create', ZRR['error']),
+ ('portset-add', ZRR['error']),
+ ('portset-remove', ZRR['error']),
+ ('portset-destroy', ZRR['error'])
+ ])
+ portset_obj = create_module(my_module, DEFAULT_ARGS)
+
+ error = expect_and_capture_ansible_exception(portset_obj.portset_get, 'fail')['msg']
+ assert 'Error fetching portset' in error
+
+ error = expect_and_capture_ansible_exception(portset_obj.create_portset, 'fail')['msg']
+ assert 'Error creating portset' in error
+
+ error = expect_and_capture_ansible_exception(portset_obj.modify_port, 'fail', 'a1', 'portset-add', 'adding')['msg']
+ assert 'Error adding port in portset' in error
+
+ error = expect_and_capture_ansible_exception(portset_obj.modify_port, 'fail', 'a2', 'portset-remove', 'removing')['msg']
+ assert 'Error removing port in portset' in error
+
+ error = expect_and_capture_ansible_exception(portset_obj.delete_portset, 'fail')['msg']
+ assert 'Error deleting portset' in error
+
+
+SRR = rest_responses({
+ 'mixed_portset_info': (200, {"records": [{
+ "interfaces": [
+ {
+ "fc": {
+ "name": "lif_1",
+ "uuid": "d229cc03"
+ }
+ },
+ {
+ "ip": {
+ "name": "lif_2",
+ "uuid": "1cd8a442"
+ }
+ }
+ ],
+ "name": "mixed_ps",
+ "protocol": "mixed",
+ "uuid": "312aa85b"
+ }], "num_records": 1}, None),
+ 'fc_portset_info': (200, {"records": [{
+ "interfaces": [
+ {
+ "fc": {
+ "name": "fc_1",
+ "uuid": "3a09cd42"
+ }
+ },
+ {
+ "fc": {
+ "name": "fc_2",
+ "uuid": "d24e03c6"
+ }
+ }
+ ],
+ "name": "fc_ps",
+ "protocol": "fcp",
+ "uuid": "5056b3b297"
+ }], "num_records": 1}, None),
+ 'lif_1': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "d229cc03"}]
+ }, None),
+ 'lif_2': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "d24e03c6"}]
+ }, None),
+ 'fc_1': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "3a09cd42"}]
+ }, None),
+ 'fc_2': (200, {
+ "num_records": 1,
+ "records": [{"uuid": "1cd8b542"}]
+ }, None)
+})
+
+
+def test_create_portset_rest():
+ ''' Test successful create '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['empty_records']),
+ ('GET', 'network/ip/interfaces', SRR['empty_records']),
+ ('GET', 'network/fc/interfaces', SRR['lif_1']),
+ ('GET', 'network/ip/interfaces', SRR['lif_2']),
+ ('GET', 'network/fc/interfaces', SRR['empty_records']),
+ ('POST', 'protocols/san/portsets', SRR['success'])
+ ])
+ args = {'use_rest': 'always'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_create_portset_idempotency_rest():
+ ''' Test create idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info'])
+ ])
+ args = {'use_rest': 'always', "ports": ["lif_1", "lif_2"]}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_modify_remove_ports_rest():
+ ''' Test modify_portset method '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info']),
+ ('DELETE', 'protocols/san/portsets/312aa85b/interfaces/1cd8a442', SRR['success'])
+ ])
+ args = {'use_rest': 'always', "ports": ["lif_1"]}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_modify_add_ports_rest():
+ ''' Test modify_portset method '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info']),
+ ('GET', 'network/ip/interfaces', SRR['empty_records']),
+ ('GET', 'network/fc/interfaces', SRR['fc_1']),
+ ('POST', 'protocols/san/portsets/312aa85b/interfaces', SRR['success'])
+ ])
+ args = {'use_rest': 'always', "ports": ["lif_1", "lif_2", "fc_1"]}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_portset_rest():
+ ''' Test successful delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info']),
+ ('DELETE', 'protocols/san/portsets/312aa85b', SRR['success'])
+ ])
+ args = {'use_rest': 'always', 'state': 'absent', 'ports': ['lif_1', 'lif_2']}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_get_portset_error_rest():
+ ''' Test error fetching portset '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', "ports": ["lif_1", "lif_2", "fc_1"]}
+ error = 'Error fetching portset'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_create_portset_error_rest():
+ ''' Test error creating portset '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['empty_records']),
+ ('POST', 'protocols/san/portsets', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', "ports": []}
+ error = 'Error creating portset'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_delete_portset_error_rest():
+ ''' Test error deleting portset '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info']),
+ ('DELETE', 'protocols/san/portsets/312aa85b', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', 'state': 'absent', "ports": ["lif_1", "lif_2"]}
+ error = 'Error deleting portset'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_add_portset_error_rest():
+ ''' Test error adding port to portset '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info']),
+ ('GET', 'network/ip/interfaces', SRR['empty_records']),
+ ('GET', 'network/fc/interfaces', SRR['fc_1']),
+ ('POST', 'protocols/san/portsets/312aa85b/interfaces', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', "ports": ["lif_1", "lif_2", "fc_1"]}
+ error = "Error adding port in portset"
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_remove_portset_error_rest():
+ ''' Test error removing port from portset '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info']),
+ ('DELETE', 'protocols/san/portsets/312aa85b/interfaces/1cd8a442', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', "ports": ["lif_1"]}
+ error = "Error removing port in portset"
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_add_ip_port_to_fc_error_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['fc_portset_info']),
+ ('GET', 'network/fc/interfaces', SRR['empty_records'])
+ ])
+ args = {'use_rest': 'always', "type": "fcp", "ports": ["fc_1", "fc_2", "lif_2"]}
+ error = 'Error: lifs: lif_2 of type fcp not found in vserver'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_get_lif_error_rest():
+ ''' Test error fetching lif details '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info']),
+ ('GET', 'network/ip/interfaces', SRR['generic_error']),
+ ('GET', 'network/fc/interfaces', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', "ports": ["lif_1", "lif_2", "fc_1"]}
+ error = "Error fetching lifs details for fc_1"
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_try_to_modify_protocol_error_rest():
+ ''' Test error when attempting to modify portset type (protocol) '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'protocols/san/portsets', SRR['mixed_portset_info'])
+ ])
+ args = {'use_rest': 'always', "type": "iscsi", "ports": ["lif_1", "lif_2"]}
+ error = "modify protocol(type) not supported"
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_invalid_value_port_rest():
+ ''' Test error on invalid (empty) port name '''
+ args = {'use_rest': 'always', "type": "iscsi", "ports": ["lif_1", ""]}
+ error = "Error: invalid value specified for ports"
+ assert error in create_module(my_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_module_ontap_9_9_0_rest_auto():
+ ''' Test fall back to ZAPI '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ args = {'use_rest': 'auto'}
+ assert create_module(my_module, DEFAULT_ARGS, args).use_rest is False
+
+
+def test_module_ontap_9_9_0_rest_always():
+ ''' Test error when rest below 9.9.1 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ args = {'use_rest': 'always'}
+ msg = "Error: REST requires ONTAP 9.9.1 or later for portset APIs."
+ assert msg in create_module(my_module, DEFAULT_ARGS, args, fail=True)['msg']
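The register_responses / create_module / create_and_apply helpers used above come from the shared test framework (tests/unit/framework and tests/unit/plugins/module_utils/ansible_mocks.py). Conceptually, register_responses queues the expected (method, endpoint, canned_response) tuples and the patched send_request pops and validates them in order; a rough stand-in for that behaviour (not the framework's actual implementation):

    from collections import deque

    _expected = deque()

    def register_responses(responses):
        # queue the expected (method, api, canned_response) tuples
        _expected.extend(responses)

    def mock_send_request(method, api, **kwargs):
        # pop the next expectation; fail fast on a mismatch or an extra call
        assert _expected, 'unexpected call: %s %s' % (method, api)
        exp_method, exp_api, response = _expected.popleft()
        assert (method, api) == (exp_method, exp_api)
        return response

    register_responses([
        ('GET', 'cluster', (200, {'version': {'generation': 9, 'major': 9, 'minor': 1}}, None)),
        ('GET', 'protocols/san/portsets', (200, {'records': [], 'num_records': 0}, None)),
    ])
    assert mock_send_request('GET', 'cluster')[0] == 200
    assert mock_send_request('GET', 'protocols/san/portsets')[1]['num_records'] == 0
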
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_publickey.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_publickey.py
new file mode 100644
index 000000000..d72d8c8eb
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_publickey.py
@@ -0,0 +1,471 @@
+# (c) 2018-2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP publickey Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible, assert_warning_was_raised, assert_no_warnings, print_warnings
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_publickey \
+ import NetAppOntapPublicKey as my_module, main as uut_main # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def default_args():
+ return {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'account': 'user123',
+ 'public_key': '161245ASDF',
+ 'vserver': 'vserver',
+ }
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_6': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'one_pk_record': (200, {
+ "records": [{
+ 'account': dict(name='user123'),
+ 'owner': dict(uuid='98765'),
+ 'public_key': '161245ASDF',
+ 'index': 12,
+ 'comment': 'comment_123',
+ }],
+ 'num_records': 1
+ }, None),
+ 'two_pk_records': (200, {
+ "records": [{
+ 'account': dict(name='user123'),
+ 'owner': dict(uuid='98765'),
+ 'public_key': '161245ASDF',
+ 'index': 12,
+ 'comment': 'comment_123',
+ },
+ {
+ 'account': dict(name='user123'),
+ 'owner': dict(uuid='98765'),
+ 'public_key': '161245ASDF',
+ 'index': 13,
+ 'comment': 'comment_123',
+ }],
+ 'num_records': 2
+ }, None)
+}
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments: account'
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_get_called(mock_request, patch_ansible):
+ ''' test get'''
+ args = dict(default_args())
+ args['index'] = 12
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['one_pk_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_create_called(mock_request, patch_ansible):
+ ''' test create'''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['index'] = 13
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record'], # get
+ SRR['empty_good'], # create
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_create_idempotent(mock_request, patch_ansible):
+ ''' test create idempotency'''
+ args = dict(default_args())
+ args['use_rest'] = 'always'
+ args['index'] = 12
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['one_pk_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_create_always_called(mock_request, patch_ansible):
+ ''' test create is always called when index is not provided'''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['empty_good'], # create
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ print_warnings()
+ assert_warning_was_raised('Module is not idempotent if index is not provided with state=present.')
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_modify_called(mock_request, patch_ansible):
+ ''' test modify'''
+ args = dict(default_args())
+ args['index'] = 12
+ args['comment'] = 'new_comment'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['one_pk_record'], # get
+ SRR['empty_good'], # modify
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_called(mock_request, patch_ansible):
+ ''' test delete'''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['index'] = 12
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['one_pk_record'], # get
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_idempotent(mock_request, patch_ansible):
+ ''' test delete idempotency'''
+ args = dict(default_args())
+ args['use_rest'] = 'always'
+ args['index'] = 12
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_failed_N_records(mock_request, patch_ansible):
+ ''' test delete fails when multiple records match and index is not provided'''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['two_pk_records'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error: index is required as more than one public_key exists for user account user123'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_succeeded_N_records(mock_request, patch_ansible):
+ ''' test delete_all removes multiple records'''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['state'] = 'absent'
+ args['delete_all'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['two_pk_records'], # get
+ SRR['empty_good'], # delete
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_succeeded_N_records_cluster(mock_request, patch_ansible):
+ ''' test delete_all removes multiple records at cluster scope'''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['state'] = 'absent'
+ args['delete_all'] = True
+ args['vserver'] = None # cluster scope
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['two_pk_records'], # get
+ SRR['empty_good'], # delete
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ uut_main()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_negative_extra_record(mock_request, patch_ansible):
+ ''' test error when more records than expected are returned'''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['state'] = 'present'
+ args['index'] = 14
+ args['vserver'] = None # cluster scope
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['two_pk_records'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ uut_main()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error in get_public_key: calling: security/authentication/publickeys: unexpected response'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_negative_extra_arg_in_modify(mock_request, patch_ansible):
+ ''' test error when modify is called with an unsupported attribute'''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['state'] = 'present'
+ args['index'] = 14
+ args['vserver'] = None # cluster scope
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['one_pk_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ uut_main()
+ print('Info: %s' % exc.value.args[0])
+ msg = "Error: attributes not supported in modify: {'index': 14}"
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_negative_empty_body_in_modify(mock_request, patch_ansible):
+ ''' test error when modify is called with nothing to change'''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['end_of_sequence']
+ ]
+ current = dict(owner=dict(uuid=''), account=dict(name=''), index=0)
+ modify = {}
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_public_key(current, modify)
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error: nothing to change - modify called with: {}'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_negative_create_called(mock_request, patch_ansible):
+ ''' test create error'''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['index'] = 13
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record'], # get
+ SRR['generic_error'], # create
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error in create_public_key: Expected error'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_negative_delete_called(mock_request, patch_ansible):
+    ''' test delete error '''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['index'] = 12
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['one_pk_record'], # get
+ SRR['generic_error'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error in delete_public_key: Expected error'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_negative_modify_called(mock_request, patch_ansible):
+    ''' test modify error '''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['index'] = 12
+ args['comment'] = 'change_me'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['one_pk_record'], # get
+ SRR['generic_error'], # modify
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error in modify_public_key: Expected error'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_negative_older_version(mock_request, patch_ansible):
+    ''' test error with older ONTAP version '''
+ args = dict(default_args())
+ args['use_rest'] = 'auto'
+ args['index'] = 12
+ args['comment'] = 'change_me'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_6'], # get version
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj = my_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error: na_ontap_publickey only supports REST, and requires ONTAP 9.7.0 or later. Found: 9.6.0'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_negative_zapi_only(mock_request, patch_ansible):
+    ''' test error when use_rest is set to never '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['index'] = 12
+ args['comment'] = 'change_me'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_6'], # get version
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj = my_module()
+ print('Info: %s' % exc.value.args[0])
+ msg = 'Error: REST is required for this module, found: "use_rest: never"'
+ assert msg in exc.value.args[0]['msg']
+ assert_no_warnings()
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qos_adaptive_policy_group.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qos_adaptive_policy_group.py
new file mode 100644
index 000000000..f568ed17a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qos_adaptive_policy_group.py
@@ -0,0 +1,313 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_qos_adaptive_policy_group '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_qos_adaptive_policy_group \
+ import NetAppOntapAdaptiveQosPolicyGroup as qos_policy_group_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
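+# Note: this file follows the older unittest.TestCase style, where MockONTAPConnection
+# below stands in for the ZAPI server connection directly, rather than using the shared
+# mock_rest_and_zapi_requests framework found in the newer function-style tests.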
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'policy':
+ xml = self.build_policy_group_info(self.params)
+ if self.kind == 'error':
+ error = netapp_utils.zapi.NaApiError('test', 'error')
+ raise error
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_policy_group_info(vol_details):
+        ''' build xml data for qos-adaptive-policy-group-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'qos-adaptive-policy-group-info': {
+ 'absolute-min-iops': '50IOPS',
+ 'expected-iops': '150IOPS/TB',
+ 'peak-iops': '220IOPS/TB',
+ 'peak-iops-allocation': 'used_space',
+ 'num-workloads': 0,
+ 'pgid': 6941,
+ 'policy-group': vol_details['name'],
+ 'vserver': vol_details['vserver']
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.mock_policy_group = {
+ 'name': 'policy_1',
+ 'vserver': 'policy_vserver',
+ 'absolute_min_iops': '50IOPS',
+ 'expected_iops': '150IOPS/TB',
+ 'peak_iops': '220IOPS/TB',
+ 'peak_iops_allocation': 'used_space'
+ }
+
+ def mock_args(self):
+ return {
+ 'name': self.mock_policy_group['name'],
+ 'vserver': self.mock_policy_group['vserver'],
+ 'absolute_min_iops': '50IOPS',
+ 'expected_iops': '150IOPS/TB',
+ 'peak_iops': '220IOPS/TB',
+ 'peak_iops_allocation': 'used_space',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'https': 'False'
+ }
+
+ def get_policy_group_mock_object(self, kind=None):
+ """
+        Helper method to return a NetAppOntapAdaptiveQosPolicyGroup object
+        :param kind: passes this param to MockONTAPConnection()
+        :return: NetAppOntapAdaptiveQosPolicyGroup object
+ """
+ policy_obj = qos_policy_group_module()
+ policy_obj.autosupport_log = Mock(return_value=None)
+ policy_obj.cluster = Mock()
+ policy_obj.cluster.invoke_successfully = Mock()
+ if kind is None:
+ policy_obj.server = MockONTAPConnection()
+ else:
+ policy_obj.server = MockONTAPConnection(kind=kind, data=self.mock_policy_group)
+ return policy_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ qos_policy_group_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_get_nonexistent_policy(self):
+ ''' Test if get_policy_group returns None for non-existent policy_group '''
+ set_module_args(self.mock_args())
+ result = self.get_policy_group_mock_object().get_policy_group()
+ assert result is None
+
+ def test_get_existing_policy_group(self):
+ ''' Test if get_policy_group returns details for existing policy_group '''
+ set_module_args(self.mock_args())
+ result = self.get_policy_group_mock_object('policy').get_policy_group()
+ assert result['name'] == self.mock_policy_group['name']
+ assert result['vserver'] == self.mock_policy_group['vserver']
+
+ def test_create_error_missing_param(self):
+ ''' Test if create throws an error if name is not specified'''
+ data = self.mock_args()
+ del data['name']
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_group_mock_object('policy').create_policy_group()
+ msg = 'missing required arguments: name'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_successful_create(self):
+ ''' Test successful create '''
+ data = self.mock_args()
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_policy_group_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ def test_create_idempotency(self):
+ ''' Test create idempotency '''
+ set_module_args(self.mock_args())
+ obj = self.get_policy_group_mock_object('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_qos_adaptive_policy_group.NetAppOntapAdaptiveQosPolicyGroup.get_policy_group')
+ def test_create_error(self, get_policy_group):
+ ''' Test create error '''
+ set_module_args(self.mock_args())
+ get_policy_group.side_effect = [
+ None
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_group_mock_object('error').apply()
+ assert exc.value.args[0]['msg'] == 'Error creating adaptive qos policy group policy_1: NetApp API failed. Reason - test:error'
+
+ def test_successful_delete(self):
+        ''' Test delete existing policy group '''
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_policy_group_mock_object('policy').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_delete_idempotency(self):
+ ''' Test delete idempotency '''
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_policy_group_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_qos_adaptive_policy_group.NetAppOntapAdaptiveQosPolicyGroup.get_policy_group')
+ def test_delete_error(self, get_policy_group):
+        ''' Test delete error '''
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ current = {
+ 'absolute_min_iops': '50IOPS',
+ 'expected_iops': '150IOPS/TB',
+ 'peak_iops': '220IOPS/TB',
+ 'peak_iops_allocation': 'used_space',
+ 'name': 'policy_1',
+ 'vserver': 'policy_vserver'
+ }
+ get_policy_group.side_effect = [
+ current
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_group_mock_object('error').apply()
+ assert exc.value.args[0]['msg'] == 'Error deleting adaptive qos policy group policy_1: NetApp API failed. Reason - test:error'
+
+ def test_successful_modify_expected_iops(self):
+ ''' Test successful modify expected iops '''
+ data = self.mock_args()
+ data['expected_iops'] = '175IOPS'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_policy_group_mock_object('policy').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_modify_expected_iops_idempotency(self):
+ ''' Test modify idempotency '''
+ data = self.mock_args()
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_policy_group_mock_object('policy').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_qos_adaptive_policy_group.NetAppOntapAdaptiveQosPolicyGroup.get_policy_group')
+ def test_modify_error(self, get_policy_group):
+        ''' Test modify error '''
+ data = self.mock_args()
+ data['expected_iops'] = '175IOPS'
+ set_module_args(data)
+ current = {
+ 'absolute_min_iops': '50IOPS',
+ 'expected_iops': '150IOPS/TB',
+ 'peak_iops': '220IOPS/TB',
+ 'peak_iops_allocation': 'used_space',
+ 'name': 'policy_1',
+ 'vserver': 'policy_vserver'
+ }
+ get_policy_group.side_effect = [
+ current
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_group_mock_object('error').apply()
+ assert exc.value.args[0]['msg'] == 'Error modifying adaptive qos policy group policy_1: NetApp API failed. Reason - test:error'
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_qos_adaptive_policy_group.NetAppOntapAdaptiveQosPolicyGroup.get_policy_group')
+ def test_rename(self, get_policy_group):
+        ''' Test rename '''
+ data = self.mock_args()
+ data['name'] = 'policy_2'
+ data['from_name'] = 'policy_1'
+ set_module_args(data)
+ current = {
+ 'absolute_min_iops': '50IOPS',
+ 'expected_iops': '150IOPS/TB',
+ 'peak_iops': '220IOPS/TB',
+ 'peak_iops_allocation': 'used_space',
+ 'name': 'policy_1',
+ 'vserver': 'policy_vserver'
+ }
+ get_policy_group.side_effect = [
+ None,
+ current
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_policy_group_mock_object('policy').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_qos_adaptive_policy_group.NetAppOntapAdaptiveQosPolicyGroup.get_policy_group')
+ def test_rename_idempotency(self, get_policy_group):
+ ''' Test rename idempotency '''
+ data = self.mock_args()
+ data['name'] = 'policy_1'
+ data['from_name'] = 'policy_1'
+ current = {
+ 'absolute_min_iops': '50IOPS',
+ 'expected_iops': '150IOPS/TB',
+ 'peak_iops': '220IOPS/TB',
+ 'peak_iops_allocation': 'used_space',
+ 'name': 'policy_1',
+ 'vserver': 'policy_vserver'
+ }
+ get_policy_group.side_effect = [
+ current,
+ current
+ ]
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_policy_group_mock_object('policy').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_qos_adaptive_policy_group.NetAppOntapAdaptiveQosPolicyGroup.get_policy_group')
+ def test_rename_error(self, get_policy_group):
+        ''' Test rename error '''
+ data = self.mock_args()
+ data['from_name'] = 'policy_1'
+ data['name'] = 'policy_2'
+ set_module_args(data)
+ current = {
+ 'absolute_min_iops': '50IOPS',
+ 'expected_iops': '150IOPS/TB',
+ 'peak_iops': '220IOPS/TB',
+ 'peak_iops_allocation': 'used_space',
+ 'is_shared': 'true',
+ 'name': 'policy_1',
+ 'vserver': 'policy_vserver'
+ }
+ get_policy_group.side_effect = [
+ None,
+ current
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_policy_group_mock_object('error').apply()
+ assert exc.value.args[0]['msg'] == 'Error renaming adaptive qos policy group policy_1: NetApp API failed. Reason - test:error'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qos_policy_group.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qos_policy_group.py
new file mode 100644
index 000000000..c14b13151
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qos_policy_group.py
@@ -0,0 +1,578 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_qos_policy_group '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_qos_policy_group \
+ import NetAppOntapQosPolicyGroup as qos_policy_group_module # module under test
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'name': 'policy_1',
+ 'vserver': 'policy_vserver',
+ 'max_throughput': '800KB/s,800IOPS',
+ 'is_shared': True,
+ 'min_throughput': '100IOPS',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'https': 'True',
+ 'use_rest': 'never'
+}
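+# Shared module arguments for the ZAPI tests below: 'use_rest': 'never' forces the ZAPI
+# code path; individual tests extend or override these via create_module()/create_and_apply().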
+
+
+qos_policy_group_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'qos-policy-group-info': {
+ 'is-shared': 'true',
+ 'max-throughput': '800KB/s,800IOPS',
+ 'min-throughput': '100IOPS',
+ 'num-workloads': 0,
+ 'pgid': 8690,
+ 'policy-group': 'policy_1',
+ 'vserver': 'policy_vserver'
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'qos_policy_info': build_zapi_response(qos_policy_group_info)
+})
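+# ZRR maps a label to a canned ZAPI response built from the dictionary above (in addition
+# to framework-provided entries such as 'empty', 'success' and 'error'); each test registers
+# the (zapi name, response) pairs it expects with register_responses(), and the framework
+# replays them when the module issues those calls.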
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ qos_policy_group_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_get_nonexistent_policy():
+ ''' Test if get_policy_group returns None for non-existent policy_group '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['empty'])
+ ])
+ qos_policy_obj = create_module(qos_policy_group_module, DEFAULT_ARGS)
+ result = qos_policy_obj.get_policy_group()
+ assert result is None
+
+
+def test_get_existing_policy_group():
+ ''' Test if get_policy_group returns details for existing policy_group '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info'])
+ ])
+ qos_policy_obj = create_module(qos_policy_group_module, DEFAULT_ARGS)
+ result = qos_policy_obj.get_policy_group()
+ assert result['name'] == DEFAULT_ARGS['name']
+ assert result['vserver'] == DEFAULT_ARGS['vserver']
+
+
+def test_create_error_missing_param():
+ ''' Test if create throws an error if name is not specified'''
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['name']
+ error = create_module(qos_policy_group_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+ assert 'missing required arguments: name' in error
+
+
+def test_error_if_fixed_qos_options_present():
+    ''' Test that an error is raised if fixed_qos_options is specified with ZAPI '''
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['max_throughput']
+ del DEFAULT_ARGS_COPY['min_throughput']
+ del DEFAULT_ARGS_COPY['is_shared']
+ DEFAULT_ARGS_COPY['fixed_qos_options'] = {'max_throughput_iops': 100}
+ error = create_module(qos_policy_group_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+ assert "Error: 'fixed_qos_options' not supported with ZAPI, use 'max_throughput' and 'min_throughput'" in error
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['empty']),
+ ('qos-policy-group-create', ZRR['success'])
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS)['changed']
+
+
+def test_create_idempotency():
+ ''' Test create idempotency '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info'])
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS)['changed'] is False
+
+
+def test_create_error():
+ ''' Test create error '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['empty']),
+ ('qos-policy-group-create', ZRR['error'])
+ ])
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS, fail=True)['msg']
+ assert 'Error creating qos policy group policy_1' in error
+
+
+def test_successful_delete():
+    ''' Test delete existing policy group '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info']),
+ ('qos-policy-group-delete', ZRR['success'])
+ ])
+ args = {
+ 'state': 'absent',
+ 'force': True
+ }
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_idempotency():
+ ''' Test delete idempotency '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['empty'])
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS, {'state': 'absent'})['changed'] is False
+
+
+def test_delete_error():
+    ''' Test delete error '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info']),
+ ('qos-policy-group-delete', ZRR['error'])
+ ])
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS, {'state': 'absent'}, fail=True)['msg']
+ assert 'Error deleting qos policy group policy_1' in error
+
+
+def test_successful_modify_max_throughput():
+ ''' Test successful modify max throughput '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info']),
+ ('qos-policy-group-modify', ZRR['success'])
+ ])
+ args = {'max_throughput': '900KB/s,800iops'}
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_modify_max_throughput_idempotency():
+ ''' Test modify idempotency '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info'])
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS)['changed'] is False
+
+
+def test_modify_error():
+    ''' Test modify error '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info']),
+ ('qos-policy-group-modify', ZRR['error'])
+ ])
+ args = {'max_throughput': '900KB/s,800iops'}
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert 'Error modifying qos policy group policy_1' in error
+
+
+def test_modify_is_shared_error():
+    ''' Test error when attempting to modify is_shared '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info'])
+ ])
+ args = {
+ 'is_shared': False,
+ 'max_throughput': '900KB/s,900IOPS'
+ }
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert "Error cannot modify 'is_shared' attribute." in error
+
+
+def test_rename():
+    ''' Test rename '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['empty']),
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info']),
+ ('qos-policy-group-rename', ZRR['success'])
+ ])
+ args = {
+ 'name': 'policy_2',
+ 'from_name': 'policy_1'
+ }
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_rename_idempotency():
+ ''' Test rename idempotency '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info'])
+ ])
+ args = {
+ 'from_name': 'policy_1'
+ }
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_rename_error():
+    ''' Test rename error '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['empty']),
+ ('qos-policy-group-get-iter', ZRR['qos_policy_info']),
+ ('qos-policy-group-rename', ZRR['error'])
+ ])
+ args = {
+ 'name': 'policy_2',
+ 'from_name': 'policy_1'
+ }
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert 'Error renaming qos policy group policy_1' in error
+
+
+def test_rename_non_existent_policy():
+    ''' Test rename error when the policy does not exist '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['empty']),
+ ('qos-policy-group-get-iter', ZRR['empty'])
+ ])
+ args = {
+ 'name': 'policy_10',
+ 'from_name': 'policy_11'
+ }
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert 'Error renaming qos policy group: cannot find' in error
+
+
+def test_get_policy_error():
+    ''' Test get policy error '''
+ register_responses([
+ ('qos-policy-group-get-iter', ZRR['error'])
+ ])
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS, fail=True)['msg']
+ assert 'Error fetching qos policy group' in error
+
+
+DEFAULT_ARGS_REST = {
+ 'name': 'policy_1',
+ 'vserver': 'policy_vserver',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'https': 'True',
+ 'use_rest': 'always',
+ 'fixed_qos_options': {
+ 'capacity_shared': False,
+ 'max_throughput_iops': 1000,
+ 'max_throughput_mbps': 100,
+ 'min_throughput_iops': 100,
+ 'min_throughput_mbps': 50
+ }
+}
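+# Module arguments for the REST tests: 'use_rest': 'always' forces the REST code path, and
+# fixed_qos_options matches the 'fixed' section of the canned qos_policy_info record below,
+# so idempotency tests see no change.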
+
+
+SRR = rest_responses({
+ 'qos_policy_info': (200, {"records": [
+ {
+ "uuid": "e4f703dc-bfbc-11ec-a164-005056b3bd39",
+ "svm": {"name": "policy_vserver"},
+ "name": "policy_1",
+ "fixed": {
+ "max_throughput_iops": 1000,
+ "max_throughput_mbps": 100,
+ "min_throughput_iops": 100,
+ 'min_throughput_mbps': 50,
+ "capacity_shared": False
+ }
+ }
+ ], 'num_records': 1}, None),
+ 'adaptive_policy_info': (200, {"records": [
+ {
+ 'uuid': '30d2fdd6-c45a-11ec-a164-005056b3bd39',
+ 'svm': {'name': 'policy_vserver'},
+ 'name': 'policy_1_',
+ 'adaptive': {
+ 'expected_iops': 200,
+ 'peak_iops': 500,
+ 'absolute_min_iops': 100
+ }
+ }
+ ], 'num_records': 1}, None)
+})
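+# SRR entries are (http status, json body, error) tuples, alongside framework-provided
+# entries such as 'is_rest_9_9_0', 'empty_records', 'generic_error' and 'success'; each REST
+# test registers the (method, api path, response) triples it expects with register_responses().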
+
+
+def test_successful_create_rest():
+ ''' Test successful create '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['empty_records']),
+ ('POST', 'storage/qos/policies', SRR['success'])
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST)['changed']
+
+
+def test_create_idempotency_rest():
+ ''' Test create idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info']),
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST)['changed'] is False
+
+
+def test_successful_create_adaptive_rest():
+    ''' Test successful create of adaptive policy '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['empty_records']),
+ ('POST', 'storage/qos/policies', SRR['success']),
+ # with block size
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/qos/policies', SRR['empty_records']),
+ ('POST', 'storage/qos/policies', SRR['success']),
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS_REST.copy()
+ del DEFAULT_ARGS_COPY['fixed_qos_options']
+ DEFAULT_ARGS_COPY['adaptive_qos_options'] = {
+ "absolute_min_iops": 100,
+ "expected_iops": 200,
+ "peak_iops": 500
+ }
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_COPY)['changed']
+ DEFAULT_ARGS_COPY['adaptive_qos_options']['block_size'] = '4k'
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_COPY)['changed']
+
+
+def test_partially_supported_option_rest():
+    ''' Test options that require a newer ONTAP version '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ error = create_module(qos_policy_group_module, DEFAULT_ARGS_REST, fail=True)['msg']
+ assert "Minimum version of ONTAP for 'fixed_qos_options.min_throughput_mbps' is (9, 8, 0)" in error
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS_REST.copy()
+ del DEFAULT_ARGS_COPY['fixed_qos_options']
+ DEFAULT_ARGS_COPY['adaptive_qos_options'] = {
+ "absolute_min_iops": 100,
+ "expected_iops": 200,
+ "peak_iops": 500,
+ "block_size": "4k"
+ }
+ error = create_module(qos_policy_group_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+ assert "Minimum version of ONTAP for 'adaptive_qos_options.block_size' is (9, 10, 1)" in error
+
+
+def test_error_create_adaptive_rest():
+    ''' Test create error with incomplete adaptive_qos_options '''
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS_REST.copy()
+ del DEFAULT_ARGS_COPY['fixed_qos_options']
+ DEFAULT_ARGS_COPY['adaptive_qos_options'] = {
+ "absolute_min_iops": 100,
+ "expected_iops": 200
+ }
+ error = create_module(qos_policy_group_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+ assert "missing required arguments: peak_iops found in adaptive_qos_options" in error
+
+
+def test_create_error_rest():
+ ''' Test create error '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['empty_records']),
+ ('POST', 'storage/qos/policies', SRR['generic_error']),
+ ])
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, fail=True)['msg']
+ assert 'Error creating qos policy group policy_1' in error
+
+
+def test_successful_delete_rest():
+    ''' Test delete existing policy group '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info']),
+ ('DELETE', 'storage/qos/policies/e4f703dc-bfbc-11ec-a164-005056b3bd39', SRR['success'])
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, {'state': 'absent'})['changed']
+
+
+def test_delete_idempotency_rest():
+ ''' Test delete idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['empty_records'])
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, {'state': 'absent'})['changed'] is False
+
+
+def test_create_error_fixed_adaptive_qos_options_missing():
+    ''' Error if fixed_qos_options is not present in create '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['empty_records'])
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS_REST.copy()
+ del DEFAULT_ARGS_COPY['fixed_qos_options']
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+ assert "Error: atleast one throughput in 'fixed_qos_options' or all 'adaptive_qos_options'" in error
+
+
+def test_delete_error_rest():
+ ''' Test delete error '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info']),
+ ('DELETE', 'storage/qos/policies/e4f703dc-bfbc-11ec-a164-005056b3bd39', SRR['generic_error'])
+ ])
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, {'state': 'absent'}, fail=True)['msg']
+ assert 'Error deleting qos policy group policy_1' in error
+
+
+def test_successful_modify_max_throughput_rest():
+ ''' Test successful modify max throughput '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info']),
+ ('PATCH', 'storage/qos/policies/e4f703dc-bfbc-11ec-a164-005056b3bd39', SRR['success'])
+ ])
+ args = {'fixed_qos_options': {
+ 'max_throughput_iops': 2000,
+ 'max_throughput_mbps': 300,
+ 'min_throughput_iops': 400,
+ 'min_throughput_mbps': 700
+ }}
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_modify_max_throughput_idempotency_rest():
+ ''' Test modify idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info'])
+ ])
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST)['changed'] is False
+
+
+def test_successful_modify_adaptive_qos_options_rest():
+    ''' Test successful modify of adaptive_qos_options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/qos/policies', SRR['adaptive_policy_info']),
+ ('PATCH', 'storage/qos/policies/30d2fdd6-c45a-11ec-a164-005056b3bd39', SRR['success'])
+ ])
+ DEFAULT_ARGS_REST_COPY = DEFAULT_ARGS_REST.copy()
+ del DEFAULT_ARGS_REST_COPY['fixed_qos_options']
+ args = {
+ 'adaptive_qos_options': {
+ 'expected_iops': 300,
+ 'peak_iops': 600,
+ 'absolute_min_iops': 200,
+ 'block_size': '4k'
+ }
+ }
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST_COPY, args)['changed']
+
+
+def test_error_adaptive_qos_options_zapi():
+ ''' Test error adaptive_qos_options zapi '''
+ DEFAULT_ARGS_REST_COPY = DEFAULT_ARGS_REST.copy()
+ del DEFAULT_ARGS_REST_COPY['fixed_qos_options']
+ DEFAULT_ARGS_REST_COPY['use_rest'] = 'never'
+ args = {
+ 'adaptive_qos_options': {
+ 'expected_iops': 300,
+ 'peak_iops': 600,
+ 'absolute_min_iops': 200
+ }
+ }
+ error = create_module(qos_policy_group_module, DEFAULT_ARGS_REST_COPY, args, fail=True)['msg']
+ assert "Error: use 'na_ontap_qos_adaptive_policy_group' module for create/modify/delete adaptive policy with ZAPI" in error
+
+
+def test_modify_error_rest():
+ ''' Test modify error rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info']),
+ ('PATCH', 'storage/qos/policies/e4f703dc-bfbc-11ec-a164-005056b3bd39', SRR['generic_error'])
+ ])
+ args = {'fixed_qos_options': {
+ 'max_throughput_iops': 2000,
+ 'max_throughput_mbps': 300,
+ 'min_throughput_iops': 400,
+ 'min_throughput_mbps': 700
+ }}
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, args, fail=True)['msg']
+ assert 'Error modifying qos policy group policy_1' in error
+
+
+def test_rename_rest():
+ ''' Test rename '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['empty_records']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info']),
+ ('PATCH', 'storage/qos/policies/e4f703dc-bfbc-11ec-a164-005056b3bd39', SRR['success'])
+ ])
+ args = {
+ 'name': 'policy_2',
+ 'from_name': 'policy_1'
+ }
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_rename_idempotency_rest():
+ ''' Test rename idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info'])
+ ])
+ args = {
+ 'from_name': 'policy_1'
+ }
+ assert create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, args)['changed'] is False
+
+
+def test_rename_error_rest():
+    ''' Test rename error '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['empty_records']),
+ ('GET', 'storage/qos/policies', SRR['qos_policy_info']),
+ ('PATCH', 'storage/qos/policies/e4f703dc-bfbc-11ec-a164-005056b3bd39', SRR['generic_error'])
+ ])
+ args = {
+ 'name': 'policy_2',
+ 'from_name': 'policy_1'
+ }
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, args, fail=True)['msg']
+ assert 'Error renaming qos policy group policy_1' in error
+
+
+def test_get_policy_error_rest():
+ ''' Test get policy error rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/qos/policies', SRR['generic_error'])
+ ])
+ error = create_and_apply(qos_policy_group_module, DEFAULT_ARGS_REST, fail=True)['msg']
+ assert 'Error fetching qos policy group policy_1' in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qtree.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qtree.py
new file mode 100644
index 000000000..e88fcb852
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_qtree.py
@@ -0,0 +1,404 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_qtree '''
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ call_main, create_module, create_and_apply, expect_and_capture_ansible_exception, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_qtree \
+ import NetAppOntapQTree as qtree_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+DEFAULT_ARGS = {
+ 'state': 'present',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'ansible',
+ 'vserver': 'ansible',
+ 'flexvol_name': 'ansible',
+ 'export_policy': 'ansible',
+ 'security_style': 'unix',
+ 'unix_permissions': '755',
+ 'use_rest': 'never'
+}
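+# Shared module arguments for the qtree ZAPI tests; the REST tests below switch code paths
+# by adding {'use_rest': 'always'} to these arguments.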
+
+
+qtree_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'qtree-info': {
+ 'export-policy': 'ansible',
+ 'vserver': 'ansible',
+ 'qtree': 'ansible',
+ 'oplocks': 'enabled',
+ 'security-style': 'unix',
+ 'mode': '755',
+ 'volume': 'ansible'
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'qtree_info': build_zapi_response(qtree_info)
+})
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'qtree_record': (200, {"records": [{
+ "svm": {"name": "ansible"},
+ "id": 1,
+ "name": "ansible",
+ "security_style": "unix",
+ "unix_permissions": 755,
+ "export_policy": {"name": "ansible"},
+ "volume": {"uuid": "uuid", "name": "volume1"}}
+ ]}, None),
+ 'job_info': (200, {
+ "job": {
+ "uuid": "d78811c1-aebc-11ec-b4de-005056b30cfa",
+ "_links": {"self": {"href": "/api/cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa"}}
+ }}, None),
+ 'job_not_found': (404, "", {"message": "entry doesn't exist", "code": "4", "target": "uuid"})
+})
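+# 'job_info' models an asynchronous REST response that returns a job link instead of a
+# completed record; 'job_not_found' simulates failed polling of that job, as exercised by
+# test_successful_create_rest_job_error below.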
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "name", "vserver", "flexvol_name"]
+ error = create_module(qtree_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_ensure_get_called():
+ ''' test get_qtree for non-existent qtree'''
+ register_responses([
+ ('qtree-list-iter', ZRR['empty'])
+ ])
+ my_obj = create_module(qtree_module, DEFAULT_ARGS)
+    qtree = my_obj.get_qtree()
+    assert qtree is None
+
+
+def test_ensure_get_called_existing():
+ ''' test get_qtree for existing qtree'''
+ register_responses([
+ ('qtree-list-iter', ZRR['qtree_info'])
+ ])
+ my_obj = create_module(qtree_module, DEFAULT_ARGS)
+ assert my_obj.get_qtree()
+
+
+def test_successful_create():
+ ''' creating qtree '''
+ register_responses([
+ ('qtree-list-iter', ZRR['empty']),
+ ('qtree-create', ZRR['success'])
+ ])
+ module_args = {
+ 'oplocks': 'enabled'
+ }
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_delete():
+ ''' deleting qtree '''
+ register_responses([
+ ('qtree-list-iter', ZRR['qtree_info']),
+ ('qtree-delete', ZRR['success'])
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_delete_idempotency():
+ ''' deleting qtree idempotency '''
+ register_responses([
+ ('qtree-list-iter', ZRR['empty'])
+ ])
+ args = {'state': 'absent'}
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_successful_modify():
+ ''' modifying qtree '''
+ register_responses([
+ ('qtree-list-iter', ZRR['qtree_info']),
+ ('qtree-modify', ZRR['success'])
+ ])
+ args = {
+ 'export_policy': 'test',
+ 'oplocks': 'enabled'
+ }
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_failed_rename():
+ ''' test error rename qtree '''
+ register_responses([
+ ('qtree-list-iter', ZRR['empty']),
+ ('qtree-list-iter', ZRR['empty'])
+ ])
+ args = {'from_name': 'test'}
+ error = 'Error renaming: qtree %s does not exist' % args['from_name']
+ assert error in create_and_apply(qtree_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_successful_rename():
+ ''' rename qtree '''
+ register_responses([
+ ('qtree-list-iter', ZRR['empty']),
+ ('qtree-list-iter', ZRR['qtree_info']),
+ ('qtree-rename', ZRR['success'])
+ ])
+ args = {'from_name': 'ansible_old'}
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ ''' test error zapi - get/create/rename/modify/delete'''
+ register_responses([
+ ('qtree-list-iter', ZRR['error']),
+ ('qtree-create', ZRR['error']),
+ ('qtree-rename', ZRR['error']),
+ ('qtree-modify', ZRR['error']),
+ ('qtree-delete', ZRR['error'])
+ ])
+ qtree_obj = create_module(qtree_module, DEFAULT_ARGS, {'from_name': 'name'})
+
+ assert 'Error fetching qtree' in expect_and_capture_ansible_exception(qtree_obj.get_qtree, 'fail')['msg']
+ assert 'Error creating qtree' in expect_and_capture_ansible_exception(qtree_obj.create_qtree, 'fail')['msg']
+ assert 'Error renaming qtree' in expect_and_capture_ansible_exception(qtree_obj.rename_qtree, 'fail')['msg']
+ assert 'Error modifying qtree' in expect_and_capture_ansible_exception(qtree_obj.modify_qtree, 'fail')['msg']
+ assert 'Error deleting qtree' in expect_and_capture_ansible_exception(qtree_obj.delete_qtree, 'fail')['msg']
+
+
+def test_get_error_rest():
+ ''' test get qtree error in rest'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['generic_error'])
+ ])
+ error = 'Error fetching qtree'
+ assert error in create_and_apply(qtree_module, DEFAULT_ARGS, {'use_rest': 'always'}, 'fail')['msg']
+
+
+def test_create_error_rest():
+    ''' test create qtree error in rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['empty_records']),
+ ('POST', 'storage/qtrees', SRR['generic_error'])
+ ])
+ error = 'Error creating qtree'
+ assert error in create_and_apply(qtree_module, DEFAULT_ARGS, {'use_rest': 'always'}, 'fail')['msg']
+
+
+def test_modify_error_rest():
+    ''' test modify qtree error in rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['qtree_record']),
+ ('PATCH', 'storage/qtrees/uuid/1', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', 'unix_permissions': '777'}
+ error = 'Error modifying qtree'
+ assert error in create_and_apply(qtree_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_rename_error_rest():
+    ''' test rename qtree error in rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['empty_records']),
+ ('GET', 'storage/qtrees', SRR['empty_records'])
+ ])
+ args = {'use_rest': 'always', 'from_name': 'abcde', 'name': 'qtree'}
+ error = 'Error renaming: qtree'
+ assert error in create_and_apply(qtree_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_delete_error_rest():
+    ''' test delete qtree error in rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['qtree_record']),
+ ('DELETE', 'storage/qtrees/uuid/1', SRR['generic_error'])
+ ])
+ args = {'use_rest': 'always', 'state': 'absent'}
+ error = 'Error deleting qtree'
+ assert error in create_and_apply(qtree_module, DEFAULT_ARGS, args, 'fail')['msg']
+
+
+def test_successful_create_rest():
+ ''' test create qtree rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['empty_records']),
+ ('POST', 'storage/qtrees', SRR['success'])
+ ])
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+
+
+def test_idempotent_create_rest():
+ ''' test create qtree idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['qtree_record'])
+ ])
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, {'use_rest': 'always'})['changed'] is False
+
+
+@patch('time.sleep')
+def test_successful_create_rest_job_error(sleep):
+    ''' test create qtree rest when the job status cannot be retrieved '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['empty_records']),
+ ('POST', 'storage/qtrees', SRR['job_info']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found'])
+ ])
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+ print_warnings()
+ assert_warning_was_raised('Ignoring job status, assuming success.')
+
+
+def test_successful_delete_rest():
+ ''' test delete qtree rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['qtree_record']),
+ ('DELETE', 'storage/qtrees/uuid/1', SRR['success'])
+ ])
+ args = {'use_rest': 'always', 'state': 'absent'}
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_idempotent_delete_rest():
+ ''' test delete qtree idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['empty_records'])
+ ])
+ args = {'use_rest': 'always', 'state': 'absent'}
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_successful_modify_rest():
+ ''' test modify qtree rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['qtree_record']),
+ ('PATCH', 'storage/qtrees/uuid/1', SRR['success'])
+ ])
+ args = {'use_rest': 'always', 'unix_permissions': '777'}
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_idempotent_modify_rest():
+ ''' test modify qtree idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['qtree_record'])
+ ])
+    args = {'use_rest': 'always'}
+    assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_successful_rename_rest():
+ ''' test rename qtree rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['zero_records']),
+ ('GET', 'storage/qtrees', SRR['qtree_record']),
+ ('PATCH', 'storage/qtrees/uuid/1', SRR['success'])
+ ])
+ args = {'use_rest': 'always', 'from_name': 'abcde', 'name': 'qtree'}
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_rename_rest_idempotent():
+ ''' test rename qtree in rest - idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['qtree_record'])
+ ])
+ args = {'use_rest': 'always', 'from_name': 'abcde'}
+ assert create_and_apply(qtree_module, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_successful_rename_and_modify_rest():
+ ''' test rename and modify qtree rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['empty_records']),
+ ('GET', 'storage/qtrees', SRR['qtree_record']),
+ ('PATCH', 'storage/qtrees/uuid/1', SRR['success'])
+ ])
+ args = {
+ 'use_rest': 'always',
+ 'from_name': 'abcde',
+ 'name': 'qtree',
+ 'unix_permissions': '744',
+ 'unix_user': 'user',
+ 'unix_group': 'group',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_missing_netapp_lib(mock_has_netapp_lib):
+ module_args = {
+ 'use_rest': 'never'
+ }
+ mock_has_netapp_lib.return_value = False
+ error = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert error == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_force_delete_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'storage/qtrees', SRR['qtree_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'force_delete': False,
+ 'state': 'absent'
+ }
+ error = 'Error: force_delete option is not supported for REST, unless set to true.'
+ assert error == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rename_qtree_not_used_with_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ my_obj = create_module(qtree_module, DEFAULT_ARGS, module_args)
+ error = 'Internal error, use modify with REST'
+ assert error in expect_and_capture_ansible_exception(my_obj.rename_qtree, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_quota_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_quota_policy.py
new file mode 100644
index 000000000..e7eb3283c
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_quota_policy.py
@@ -0,0 +1,174 @@
+# (c) 2019, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_quota_policy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_quota_policy \
+ import NetAppOntapQuotaPolicy as quota_policy_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
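+# Like the other unittest-style tests in this collection, MockONTAPConnection below fakes
+# the ZAPI server connection and returns prebuilt NaElement responses.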
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'quota':
+ xml = self.build_quota_policy_info(self.params, True)
+ if self.kind == 'quota_not_assigned':
+ xml = self.build_quota_policy_info(self.params, False)
+ elif self.kind == 'zapi_error':
+ error = netapp_utils.zapi.NaApiError('test', 'error')
+ raise error
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_quota_policy_info(params, assigned):
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {'num-records': 1,
+ 'attributes-list': {
+ 'quota-policy-info': {
+ 'policy-name': params['name']},
+ 'vserver-info': {
+ 'quota-policy': params['name'] if assigned else 'default'}
+ }}
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_quota_policy '''
+
+ def setUp(self):
+ self.mock_quota_policy = {
+ 'state': 'present',
+ 'vserver': 'test_vserver',
+ 'name': 'test_policy'
+ }
+
+ def mock_args(self):
+ return {
+ 'state': self.mock_quota_policy['state'],
+ 'vserver': self.mock_quota_policy['vserver'],
+ 'name': self.mock_quota_policy['name'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_quota_policy_mock_object(self, kind=None):
+ policy_obj = quota_policy_module()
+ if kind is None:
+ policy_obj.server = MockONTAPConnection()
+ else:
+ policy_obj.server = MockONTAPConnection(kind=kind, data=self.mock_quota_policy)
+ return policy_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ quota_policy_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_successfully_create(self):
+ set_module_args(self.mock_args())
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_quota_policy_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ def test_create_idempotency(self):
+ set_module_args(self.mock_args())
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_quota_policy_mock_object('quota').apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_cannot_delete(self):
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_quota_policy_mock_object('quota').apply()
+ msg = 'Error policy test_policy cannot be deleted as it is assigned to the vserver test_vserver'
+ assert msg == exc.value.args[0]['msg']
+
+ def test_successfully_delete(self):
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_quota_policy_mock_object('quota_not_assigned').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_delete_idempotency(self):
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_quota_policy_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successfully_assign(self):
+ data = self.mock_args()
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_quota_policy_mock_object('quota_not_assigned').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_quota_policy.NetAppOntapQuotaPolicy.get_quota_policy')
+    def test_successful_rename(self, get_quota_policy):
+ data = self.mock_args()
+ data['name'] = 'new_policy'
+ data['from_name'] = 'test_policy'
+ set_module_args(data)
+ current = {
+ 'name': 'test_policy'
+ }
+        get_quota_policy.side_effect = [
+ None,
+ current
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_quota_policy_mock_object('quota').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_error(self):
+ data = self.mock_args()
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_quota_policy_mock_object('zapi_error').get_quota_policy()
+ assert exc.value.args[0]['msg'] == 'Error fetching quota policy test_policy: NetApp API failed. Reason - test:error'
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_quota_policy_mock_object('zapi_error').create_quota_policy()
+ assert exc.value.args[0]['msg'] == 'Error creating quota policy test_policy: NetApp API failed. Reason - test:error'
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_quota_policy_mock_object('zapi_error').delete_quota_policy()
+ assert exc.value.args[0]['msg'] == 'Error deleting quota policy test_policy: NetApp API failed. Reason - test:error'
+ data['name'] = 'new_policy'
+ data['from_name'] = 'test_policy'
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_quota_policy_mock_object('zapi_error').rename_quota_policy()
+ assert exc.value.args[0]['msg'] == 'Error renaming quota policy test_policy: NetApp API failed. Reason - test:error'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_quotas.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_quotas.py
new file mode 100644
index 000000000..cd03989c6
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_quotas.py
@@ -0,0 +1,853 @@
+# (c) 2019-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_quotas '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings,\
+ assert_warning_was_raised, call_main, patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_quotas \
+ import NetAppONTAPQuotas as my_module, main as my_main
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+SRR = rest_responses({
+ # module specific responses
+ 'quota_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansible"
+ },
+ "files": {
+ "hard_limit": "100",
+ "soft_limit": "80"
+ },
+ "qtree": {
+ "id": "1",
+ "name": "qt1"
+ },
+ "space": {
+ "hard_limit": "1222800",
+ "soft_limit": "51200"
+ },
+ "type": "user",
+ "user_mapping": False,
+ "users": [{"name": "quota_user"}],
+ "uuid": "264a9e0b-2e03-11e9-a610-005056a7b72d",
+ "volume": {"name": "fv", "uuid": "264a9e0b-2e03-11e9-a610-005056a7b72da"},
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ },
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ 'quota_record_0_empty_limtis': (200, {"records": [{
+ "svm": {"name": "ansible"},
+ "files": {"hard_limit": 0},
+ "qtree": {"id": "1", "name": "qt1"},
+ "space": {"hard_limit": 0},
+ "type": "user",
+ "user_mapping": False,
+ "users": [{"name": "quota_user"}],
+ "uuid": "264a9e0b-2e03-11e9-a610-005056a7b72d",
+ "volume": {"name": "fv", "uuid": "264a9e0b-2e03-11e9-a610-005056a7b72da"},
+ "target": {"name": "20:05:00:50:56:b3:0c:fa"},
+ }], "num_records": 1}, None),
+ 'quota_status': (
+ 200,
+ {
+ "records": [
+ {
+ "quota": {"state": "off"}
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ 'quota_on': (
+ 200,
+ {
+ "records": [
+ {
+ "quota": {"state": "on"}
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None),
+ "error_5308572": (409, None, {'code': 5308572, 'message': 'Expected delete error'}),
+ "error_5308569": (409, None, {'code': 5308569, 'message': 'Expected delete error'}),
+ "error_5308568": (409, None, {'code': 5308568, 'message': 'Expected create error'}),
+ "error_5308571": (409, None, {'code': 5308571, 'message': 'Expected create error'}),
+ "error_5308567": (409, None, {'code': 5308567, 'message': 'Expected modify error'}),
+ 'error_rest': (404, None, {"message": "temporarily locked from changes", "code": "4", "target": "uuid"}),
+ "volume_uuid": (200, {"records": [{
+ 'uuid': 'sdgthfd'
+ }], 'num_records': 1}, None),
+ 'job_info': (200, {
+ "job": {
+ "uuid": "d78811c1-aebc-11ec-b4de-005056b30cfa",
+ "_links": {"self": {"href": "/api/cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa"}}
+ }}, None),
+ 'job_not_found': (404, "", {"message": "entry doesn't exist", "code": "4", "target": "uuid"})
+})
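+# Note: each SRR entry is a (status_code, response_body, error) tuple consumed by the mocked
+# send_request in the unit test framework. Keys used below but not defined here (for example
+# 'is_rest', 'empty_records', 'empty_good', 'success', 'generic_error') are presumably supplied
+# as defaults by rest_responses().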
+
+
+quota_policy = {
+ 'num-records': 1,
+ 'attributes-list': {'quota-entry': {'volume': 'ansible', 'policy-name': 'policy_name', 'perform-user-mapping': 'true',
+ 'file-limit': '-', 'disk-limit': '-', 'quota-target': '/vol/ansible',
+ 'soft-file-limit': '-', 'soft-disk-limit': '-', 'threshold': '-'}},
+}
+
+quota_policies = {
+ 'num-records': 2,
+ 'attributes-list': [{'quota-policy-info': {'policy-name': 'p1'}},
+ {'quota-policy-info': {'policy-name': 'p2'}}],
+}
+
+ZRR = zapi_responses({
+ 'quota_policy': build_zapi_response(quota_policy, 1),
+ 'quota_on': build_zapi_response({'status': 'on'}, 1),
+ 'quota_off': build_zapi_response({'status': 'off'}, 1),
+ 'quota_policies': build_zapi_response(quota_policies, 1),
+ 'quota_fail': build_zapi_error('TEST', 'This exception is from the unit test'),
+ 'quota_fail_13001': build_zapi_error('13001', 'success'),
+ 'quota_fail_14958': build_zapi_error('14958', 'No valid quota rules found'),
+})
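+# ZRR maps names to canned ZAPI results: build_zapi_response wraps the given dict as a
+# successful ZAPI reply, while build_zapi_error(code, message) simulates a ZAPI failure.
+# The module's retry and warning paths key off error codes such as 13001 and 14958, which is
+# why those get dedicated entries here.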
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'volume': 'ansible',
+ 'vserver': 'ansible',
+ 'quota_target': '/vol/ansible',
+ 'type': 'user',
+ 'use_rest': 'never'
+}
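+# DEFAULT_ARGS pins use_rest to 'never', so the tests below exercise the ZAPI code path;
+# the REST tests further down use ARGS_REST (use_rest: 'always') instead.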
+
+
+def test_module_fail_when_required_args_missing():
+ error = create_module(my_module, fail=True)['msg']
+ assert 'missing required arguments:' in error
+
+
+def test_ensure_get_called():
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['empty']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ quotas = my_obj.get_quotas()
+ print('QUOTAS', quotas)
+ assert quotas is None
+
+
+def test_ensure_get_quota_not_called():
+ args = dict(DEFAULT_ARGS)
+ args.pop('quota_target')
+ args.pop('type')
+ my_obj = create_module(my_module, args)
+ assert my_obj.get_quotas() is None
+
+
+def test_ensure_get_called_existing():
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ quotas = my_obj.get_quotas()
+ print('QUOTAS', quotas)
+ assert quotas
+
+
+def test_successful_create():
+ ''' creating quota and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['no_records']),
+ ('ZAPI', 'quota-status', ZRR['quota_on']),
+ ('ZAPI', 'quota-set-entry', ZRR['success']),
+ ('ZAPI', 'quota-resize', ZRR['success']),
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ('ZAPI', 'quota-status', ZRR['quota_on']),
+ ])
+ module_args = {
+ 'file_limit': '3',
+ 'disk_limit': '4',
+ 'perform_user_mapping': False,
+ 'policy': 'policy',
+ 'soft_file_limit': '3',
+ 'soft_disk_limit': '4',
+ 'threshold': '10',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_successful_delete():
+ ''' deleting quota and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ('ZAPI', 'quota-status', ZRR['quota_on']),
+ ('ZAPI', 'quota-delete-entry', ZRR['success']),
+ ('ZAPI', 'quota-resize', ZRR['success']),
+ ('ZAPI', 'quota-list-entries-iter', ZRR['no_records']),
+ ('ZAPI', 'quota-status', ZRR['quota_on']),
+ ])
+ module_args = {
+ 'policy': 'policy',
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_successful_modify(dont_sleep):
+ ''' modifying quota and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ('ZAPI', 'quota-status', ZRR['quota_on']),
+ ('ZAPI', 'quota-modify-entry', ZRR['success']),
+ ('ZAPI', 'quota-off', ZRR['success']),
+ ('ZAPI', 'quota-on', ZRR['success']),
+ ])
+ module_args = {
+ 'activate_quota_on_change': 'reinitialize',
+ 'file_limit': '3',
+ 'policy': 'policy',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_quota_on_off():
+ ''' quota set on or off '''
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ('ZAPI', 'quota-status', ZRR['quota_off']),
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ('ZAPI', 'quota-status', ZRR['quota_on']),
+ ('ZAPI', 'quota-off', ZRR['success']),
+ ])
+ module_args = {'set_quota_status': False}
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ZAPI', 'quota-status', ZRR['quota_fail']),
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_fail']),
+ ('ZAPI', 'quota-set-entry', ZRR['quota_fail']),
+ ('ZAPI', 'quota-delete-entry', ZRR['quota_fail']),
+ ('ZAPI', 'quota-modify-entry', ZRR['quota_fail']),
+ ('ZAPI', 'quota-on', ZRR['quota_fail']),
+ ('ZAPI', 'quota-policy-get-iter', ZRR['quota_fail']),
+ ('ZAPI', 'quota-resize', ZRR['quota_fail']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert 'Error fetching quotas status info' in expect_and_capture_ansible_exception(my_obj.get_quota_status, 'fail')['msg']
+ assert 'Error fetching quotas info' in expect_and_capture_ansible_exception(my_obj.get_quotas, 'fail')['msg']
+ assert 'Error adding/modifying quota entry' in expect_and_capture_ansible_exception(my_obj.quota_entry_set, 'fail')['msg']
+ assert 'Error deleting quota entry' in expect_and_capture_ansible_exception(my_obj.quota_entry_delete, 'fail')['msg']
+ assert 'Error modifying quota entry' in expect_and_capture_ansible_exception(my_obj.quota_entry_modify, 'fail', {})['msg']
+ assert 'Error setting quota-on for ansible' in expect_and_capture_ansible_exception(my_obj.on_or_off_quota, 'fail', 'quota-on')['msg']
+ assert 'Error fetching quota policies' in expect_and_capture_ansible_exception(my_obj.get_quota_policies, 'fail')['msg']
+ assert 'Error setting quota-resize for ansible:' in expect_and_capture_ansible_exception(my_obj.resize_quota, 'fail')['msg']
+
+
+def test_get_quota_policies():
+ register_responses([
+ ('ZAPI', 'quota-policy-get-iter', ZRR['quota_policies']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ policies = my_obj.get_quota_policies()
+ assert len(policies) == 2
+
+
+def test_debug_quota_get_error_fail():
+ register_responses([
+ ('ZAPI', 'quota-policy-get-iter', ZRR['quota_policies']),
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.debug_quota_get_error, 'fail', 'dummy error')['msg']
+ assert error.startswith('Error fetching quotas info: dummy error - current vserver policies: ')
+
+
+def test_debug_quota_get_error_success():
+ register_responses([
+ ('ZAPI', 'quota-policy-get-iter', ZRR['quota_policy']),
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ quotas = my_obj.debug_quota_get_error('dummy error')
+ print('QUOTAS', quotas)
+ assert quotas
+
+
+def test_get_no_quota_retry_on_13001():
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_fail_13001']),
+ ])
+ module_args = {'policy': 'policy'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.get_quotas, 'fail')['msg']
+ assert error.startswith('Error fetching quotas info for policy policy')
+
+
+def test_get_quota_retry_on_13001():
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_fail_13001']),
+ ('ZAPI', 'quota-policy-get-iter', ZRR['quota_policy']),
+ ('ZAPI', 'quota-list-entries-iter', ZRR['quota_policy']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ quotas = my_obj.get_quotas()
+ print('QUOTAS', quotas)
+ assert quotas
+
+
+def test_resize_warning():
+ ''' warning as resize is not allowed if all rules were deleted '''
+ register_responses([
+ ('ZAPI', 'quota-resize', ZRR['quota_fail_14958']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.resize_quota('delete')
+ assert_warning_was_raised('Last rule deleted, but quota is on as resize is not allowed.')
+
+
+def test_quota_on_warning():
+ ''' warning as quota-on is not allowed if all rules were deleted '''
+ register_responses([
+ ('ZAPI', 'quota-on', ZRR['quota_fail_14958']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.on_or_off_quota('quota-on', 'delete')
+ print_warnings()
+ assert_warning_was_raised('Last rule deleted, quota is off.')
+
+
+def test_convert_size_format():
+ module_args = {'disk_limit': '10MB'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.convert_to_kb_or_bytes('disk_limit')
+ print(my_obj.parameters)
+ assert my_obj.parameters['disk_limit'] == '10240'
+ my_obj.parameters['disk_limit'] = '10'
+ assert my_obj.convert_to_kb_or_bytes('disk_limit')
+ print(my_obj.parameters)
+ assert my_obj.parameters['disk_limit'] == '10'
+ my_obj.parameters['disk_limit'] = '10tB'
+ assert my_obj.convert_to_kb_or_bytes('disk_limit')
+ print(my_obj.parameters)
+ assert my_obj.parameters['disk_limit'] == str(10 * 1024 * 1024 * 1024)
+ my_obj.parameters['disk_limit'] = ''
+ assert not my_obj.convert_to_kb_or_bytes('disk_limit')
+ print(my_obj.parameters)
+ assert my_obj.parameters['disk_limit'] == ''
+
+
+def test_error_convert_size_format():
+ module_args = {
+ 'disk_limit': '10MBi',
+ 'quota_target': ''
+ }
+ error = create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error.startswith('disk_limit input string is not a valid size format')
+ module_args = {
+ 'soft_disk_limit': 'MBi',
+ 'quota_target': ''
+ }
+ error = create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error.startswith('soft_disk_limit input string is not a valid size format')
+ module_args = {
+ 'soft_disk_limit': '10MB10',
+ 'quota_target': ''
+ }
+ error = create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error.startswith('soft_disk_limit input string is not a valid size format')
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_has_netapp_lib(has_netapp_lib):
+ has_netapp_lib.return_value = False
+ assert call_main(my_main, DEFAULT_ARGS, fail=True)['msg'] == 'Error: the python NetApp-Lib module is required. Import error: None'
+
+
+def create_from_main():
+ register_responses([
+ ('ZAPI', 'quota-list-entries-iter', ZRR['no_records']),
+ ('ZAPI', 'quota-status', ZRR['quota_on']),
+ ('ZAPI', 'quota-set-entry', ZRR['success']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS)['changed']
+
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'volume': 'ansible',
+ 'vserver': 'ansible',
+ 'quota_target': 'quota_user',
+ 'qtree': 'qt1',
+ 'type': 'user'
+}
+
+
+def test_rest_error_get():
+ '''Test error rest get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on getting quota rule info' in error
+
+
+def test_rest_successful_create():
+ '''Test successful rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('POST', 'storage/quota/rules', SRR['empty_good']),
+ ])
+ module_args = {
+ "users": [{"name": "quota_user"}],
+ }
+ assert create_and_apply(my_module, ARGS_REST)
+
+
+@patch('time.sleep')
+def test_rest_successful_create_job_error(sleep):
+    '''Test rest create succeeds even when the job status cannot be retrieved'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('POST', 'storage/quota/rules', SRR['job_info']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'storage/volumes', SRR['volume_uuid'])
+ ])
+ module_args = {
+ "users": [{"name": "quota_user"}],
+ }
+ assert create_and_apply(my_module, ARGS_REST)
+ print_warnings()
+ assert_warning_was_raised('Ignoring job status, assuming success.')
+
+
+def test_rest_error_create():
+ '''Test error rest create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('POST', 'storage/quota/rules', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on creating quotas rule:' in error
+
+
+def test_delete_rest():
+ ''' Test delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('DELETE', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_error_delete_rest():
+ ''' Test error delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('DELETE', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['generic_error']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on deleting quotas rule:' in error
+
+
+def test_modify_files_limit_rest():
+ ''' Test modify with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_on']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['empty_good']),
+ ])
+ module_args = {
+ "file_limit": "122", "soft_file_limit": "90"
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_modify_space_limit_rest():
+ ''' Test modify with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_on']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['empty_good']),
+ ])
+ module_args = {
+ "disk_limit": "1024", "soft_disk_limit": "80"
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_modify_rest_error():
+ ''' Test negative modify with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['generic_error']),
+ ])
+ module_args = {
+ 'perform_user_mapping': True
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on modifying quotas rule:' in error
+
+
+@patch('time.sleep')
+def test_modify_rest_temporary_locked_error(sleep):
+    ''' Test retries on temporarily locked errors with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+    # wait for 60s if we get a temporarily locked error.
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_rest']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_rest']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['success']),
+
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+    # error persists even after 60s
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_rest']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_rest']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_rest']),
+
+    # wait 60s in create for a temporarily locked error.
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('POST', 'storage/quota/rules', SRR['error_rest']),
+ ('POST', 'storage/quota/rules', SRR['success']),
+ ])
+ module_args = {
+ 'perform_user_mapping': True
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+ assert 'Error on modifying quotas rule:' in create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+
+
+def test_rest_successful_create_idempotency():
+    '''Test rest create idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record_0_empty_limtis']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record_0_empty_limtis']),
+ ('GET', 'storage/volumes', SRR['quota_status'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST)['changed'] is False
+ module_args = {
+ "disk_limit": "0", "soft_disk_limit": "-", "file_limit": 0, "soft_file_limit": "-"
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed'] is False
+ module_args = {
+ "disk_limit": "0", "soft_disk_limit": "-1", "file_limit": "0", "soft_file_limit": "-1"
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed'] is False
+
+
+def test_rest_successful_delete_idempotency():
+    '''Test rest delete idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ])
+ module_args = {'use_rest': 'always', 'state': 'absent'}
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed'] is False
+
+
+def test_modify_quota_status_rest():
+    ''' Test modify quota status with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('PATCH', 'storage/volumes/264a9e0b-2e03-11e9-a610-005056a7b72da', SRR['empty_good'])
+ ])
+ module_args = {"set_quota_status": "on"}
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_error_convert_size_format_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ module_args = {
+ 'disk_limit': '10MBi',
+ 'quota_target': ''
+ }
+ error = create_module(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert error.startswith('disk_limit input string is not a valid size format')
+ module_args = {
+ 'soft_disk_limit': 'MBi',
+ 'quota_target': ''
+ }
+ error = create_module(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert error.startswith('soft_disk_limit input string is not a valid size format')
+ module_args = {
+ 'soft_disk_limit': '10MB10',
+ 'quota_target': ''
+ }
+ error = create_module(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert error.startswith('soft_disk_limit input string is not a valid size format')
+
+
+def test_convert_size_format_rest():
+ module_args = {'disk_limit': '10MB'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.convert_to_kb_or_bytes('disk_limit')
+ print(my_obj.parameters)
+ assert my_obj.parameters['disk_limit'] == '10240'
+ my_obj.parameters['disk_limit'] = '10'
+ assert my_obj.convert_to_kb_or_bytes('disk_limit')
+ print(my_obj.parameters)
+ assert my_obj.parameters['disk_limit'] == '10'
+ my_obj.parameters['disk_limit'] = '10tB'
+ assert my_obj.convert_to_kb_or_bytes('disk_limit')
+ print(my_obj.parameters)
+ assert my_obj.parameters['disk_limit'] == str(10 * 1024 * 1024 * 1024)
+ my_obj.parameters['disk_limit'] = ''
+ assert not my_obj.convert_to_kb_or_bytes('disk_limit')
+ print(my_obj.parameters)
+ assert my_obj.parameters['disk_limit'] == ''
+
+
+def test_warning_rest_delete_5308572():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('DELETE', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_5308572'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST, {'state': 'absent'})['changed']
+ # assert 'Error on deleting quotas rule:' in error
+ msg = "Quota policy rule delete opertation succeeded. However the rule is still being enforced. To stop enforcing, "\
+ "reinitialize(disable and enable again) the quota for volume ansible in SVM ansible."
+ assert_warning_was_raised(msg)
+
+
+@patch('time.sleep')
+def test_no_warning_rest_delete_5308572(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_on']),
+ ('DELETE', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_5308572']),
+ ('PATCH', 'storage/volumes/264a9e0b-2e03-11e9-a610-005056a7b72da', SRR['success']),
+ ('PATCH', 'storage/volumes/264a9e0b-2e03-11e9-a610-005056a7b72da', SRR['success'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST, {'state': 'absent', 'activate_quota_on_change': 'reinitialize'})['changed']
+ assert_no_warnings()
+
+
+def test_warning_rest_delete_5308569():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('DELETE', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_5308569'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST, {'state': 'absent'})['changed']
+ # assert 'Error on deleting quotas rule:' in error
+ msg = "Quota policy rule delete opertation succeeded. However quota resize failed due to an internal error. To make quotas active, "\
+ "reinitialize(disable and enable again) the quota for volume ansible in SVM ansible."
+ assert_warning_was_raised(msg)
+
+
+@patch('time.sleep')
+def test_no_warning_rest_delete_5308569(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_on']),
+ ('DELETE', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_5308569']),
+ ('PATCH', 'storage/volumes/264a9e0b-2e03-11e9-a610-005056a7b72da', SRR['success']),
+ ('PATCH', 'storage/volumes/264a9e0b-2e03-11e9-a610-005056a7b72da', SRR['success'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST, {'state': 'absent', 'activate_quota_on_change': 'reinitialize'})['changed']
+ assert_no_warnings()
+
+
+def test_warning_rest_create_5308568():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('POST', 'storage/quota/rules', SRR['error_5308568']),
+ ('GET', 'storage/volumes', SRR['volume_uuid'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST)['changed']
+ msg = "Quota policy rule create opertation succeeded. However quota resize failed due to an internal error. To make quotas active, "\
+ "reinitialize(disable and enable again) the quota for volume ansible in SVM ansible."
+ assert_warning_was_raised(msg)
+
+
+@patch('time.sleep')
+def test_no_warning_rest_create_5308568(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_on']),
+ ('POST', 'storage/quota/rules', SRR['error_5308568']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('PATCH', 'storage/volumes/sdgthfd', SRR['success']),
+ ('PATCH', 'storage/volumes/sdgthfd', SRR['success'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST, {'activate_quota_on_change': 'reinitialize'})['changed']
+ assert_no_warnings()
+
+
+def test_warning_rest_create_5308571():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_status']),
+ ('POST', 'storage/quota/rules', SRR['error_5308571']),
+ ('GET', 'storage/volumes', SRR['volume_uuid'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST)['changed']
+ msg = "Quota policy rule create opertation succeeded. but quota resize is skipped. To make quotas active, "\
+ "reinitialize(disable and enable again) the quota for volume ansible in SVM ansible."
+ assert_warning_was_raised(msg)
+
+
+@patch('time.sleep')
+def test_no_warning_rest_create_5308571(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['quota_on']),
+ ('POST', 'storage/quota/rules', SRR['error_5308568']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('PATCH', 'storage/volumes/sdgthfd', SRR['success']),
+ ('PATCH', 'storage/volumes/sdgthfd', SRR['success'])
+ ])
+ assert create_and_apply(my_module, ARGS_REST, {'activate_quota_on_change': 'reinitialize'})['changed']
+ assert_no_warnings()
+
+
+def test_warning_rest_modify_5308567():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_on']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_5308567']),
+ ])
+ module_args = {"soft_file_limit": "100"}
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+ msg = "Quota policy rule modify opertation succeeded. However quota resize failed due to an internal error. To make quotas active, "\
+ "reinitialize(disable and enable again) the quota for volume ansible in SVM ansible."
+ assert_warning_was_raised(msg)
+
+
+@patch('time.sleep')
+def test_no_warning_rest_modify_5308567(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/quota/rules', SRR['quota_record']),
+ ('GET', 'storage/volumes', SRR['quota_on']),
+ ('PATCH', 'storage/quota/rules/264a9e0b-2e03-11e9-a610-005056a7b72d', SRR['error_5308567']),
+ ('PATCH', 'storage/volumes/264a9e0b-2e03-11e9-a610-005056a7b72da', SRR['success']),
+ ('PATCH', 'storage/volumes/264a9e0b-2e03-11e9-a610-005056a7b72da', SRR['success'])
+ ])
+ module_args = {"soft_file_limit": "100", 'activate_quota_on_change': 'reinitialize'}
+ assert create_and_apply(my_module, ARGS_REST, module_args)['changed']
+ assert_no_warnings()
+
+
+def test_if_all_methods_catch_exception_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/quota/rules', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('POST', 'storage/quota/rules', SRR['generic_error']),
+ ('DELETE', 'storage/quota/rules/abdcdef', SRR['generic_error']),
+ ('PATCH', 'storage/quota/rules/abdcdef', SRR['generic_error']),
+ ('PATCH', 'storage/volumes/ghijklmn', SRR['generic_error']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+
+ ])
+ my_obj = create_module(my_module, ARGS_REST)
+ my_obj.quota_uuid = 'abdcdef'
+ my_obj.volume_uuid = 'ghijklmn'
+ assert 'Error on getting quota rule info' in expect_and_capture_ansible_exception(my_obj.get_quotas_rest, 'fail')['msg']
+ assert 'Error on getting quota status info' in expect_and_capture_ansible_exception(my_obj.get_quota_status_or_volume_id_rest, 'fail')['msg']
+ assert 'Error on getting volume' in expect_and_capture_ansible_exception(my_obj.get_quota_status_or_volume_id_rest, 'fail', True)['msg']
+ assert 'does not exist' in expect_and_capture_ansible_exception(my_obj.get_quota_status_or_volume_id_rest, 'fail', True)['msg']
+ assert 'Error on creating quotas rule' in expect_and_capture_ansible_exception(my_obj.quota_entry_set_rest, 'fail')['msg']
+ assert 'Error on deleting quotas rule' in expect_and_capture_ansible_exception(my_obj.quota_entry_delete_rest, 'fail')['msg']
+ assert 'Error on modifying quotas rule' in expect_and_capture_ansible_exception(my_obj.quota_entry_modify_rest, 'fail', {})['msg']
+ assert 'Error setting quota-on for ansible' in expect_and_capture_ansible_exception(my_obj.on_or_off_quota_rest, 'fail', 'quota-on')['msg']
+ error = "Error: Qtree cannot be specified for a tree type rule"
+ assert error in create_module(my_module, ARGS_REST, {'qtree': 'qtree1', 'type': 'tree'}, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_cli.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_cli.py
new file mode 100644
index 000000000..d9f89a21a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_cli.py
@@ -0,0 +1,128 @@
+# (c) 2019-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_rest_cli'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_module, expect_and_capture_ansible_exception, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_rest_cli import NetAppONTAPCommandREST as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'allow': (200, {'Allow': ['GET', 'WHATEVER']}, None)
+}, False)
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'auto',
+ 'command': 'volume',
+ 'verb': 'GET',
+ 'params': {'fields': 'size,percent_used'}
+}
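+# 'command' selects the private CLI endpoint (private/cli/volume in the responses below);
+# 'params' is presumably forwarded as the query string by the module.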
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('verb')
+ error = 'missing required arguments: verb'
+ assert error in call_main(my_main, args, fail=True)['msg']
+
+
+def test_rest_cli():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/volume', SRR['empty_good']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS)['changed'] is False
+
+
+def test_rest_cli_options():
+ module_args = {'verb': 'OPTIONS'}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('OPTIONS', 'private/cli/volume', SRR['allow']),
+ ])
+ exit_json = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert not exit_json['changed']
+ assert 'Allow' in exit_json['msg']
+
+
+def test_negative_connection_error():
+ module_args = {'verb': 'OPTIONS'}
+ register_responses([
+ ('GET', 'cluster', SRR['generic_error']),
+ ])
+ msg = "failed to connect to REST over hostname: ['Expected error']. Use na_ontap_command for non-rest CLI."
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def check_verb(verb):
+ module_args = {'verb': verb}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ (verb, 'private/cli/volume', SRR['allow']),
+ ], "test_verbs")
+
+ exit_json = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert not exit_json['changed'] if verb in ['GET', 'OPTIONS'] else exit_json['changed']
+ assert 'Allow' in exit_json['msg']
+ # assert mock_request.call_args[0][0] == verb
+
+
+def test_verbs():
+ for verb in ['POST', 'DELETE', 'PATCH', 'OPTIONS', 'PATCH']:
+ check_verb(verb)
+
+
+def test_check_mode():
+ module_args = {'verb': 'GET'}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.module.check_mode = True
+ result = expect_and_capture_ansible_exception(my_obj.apply, 'exit')
+ assert result['changed'] is False
+ msg = "Would run command: 'volume'"
+ assert msg in result['msg']
+
+
+def test_negative_verb():
+ module_args = {'verb': 'GET'}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.verb = 'INVALID'
+ msg = 'Error: unexpected verb INVALID'
+ assert msg in expect_and_capture_ansible_exception(my_obj.apply, 'fail')['msg']
+
+
+def test_negative_error():
+ module_args = {'verb': 'GET'}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'private/cli/volume', SRR['generic_error']),
+ ])
+ msg = 'Error: Expected error'
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_info.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_info.py
new file mode 100644
index 000000000..bf678e3ac
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_rest_info.py
@@ -0,0 +1,1195 @@
+# (c) 2020-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' Unit tests for NetApp ONTAP REST APIs Ansible module: na_ontap_rest_info '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import call_main, create_module, \
+ expect_and_capture_ansible_exception, patch_ansible, create_and_apply, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_rest_info \
+ import NetAppONTAPGatherInfo as ontap_rest_info_module, main as my_main
+
+if sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # common responses
+ 'validate_ontap_version_pass': (
+ 200, dict(version=dict(generation=9, major=10, minor=1, full='dummy_9_10_1')), None),
+ 'validate_ontap_version_fail': (200, None, 'API not found error'),
+ 'error_invalid_api': (500, None, {'code': 3, 'message': 'Invalid API'}),
+ 'error_user_is_not_authorized': (500, None, {'code': 6, 'message': 'user is not authorized'}),
+ 'error_no_processing': (500, None, {'code': 123, 'message': 'error reported as is'}),
+ 'error_no_aggr_recommendation': (
+ 500, None, {'code': 19726344, 'message': 'No recommendation can be made for this cluster'}),
+ 'get_subset_info': (200,
+ {'_links': {'self': {'href': 'dummy_href'}},
+ 'num_records': 3,
+ 'records': [{'name': 'dummy_vol1'},
+ {'name': 'dummy_vol2'},
+ {'name': 'dummy_vol3'}],
+ 'version': 'ontap_version'}, None),
+ 'get_subset_info_with_next': (200,
+ {'_links': {'self': {'href': 'dummy_href'},
+ 'next': {'href': '/api/next_record_api'}},
+ 'num_records': 3,
+ 'records': [{'name': 'dummy_vol1'},
+ {'name': 'dummy_vol2'},
+ {'name': 'dummy_vol3'}],
+ 'version': 'ontap_version'}, None),
+ 'get_next_record': (200,
+ {'_links': {'self': {'href': 'dummy_href'}},
+ 'num_records': 2,
+ 'records': [{'name': 'dummy_vol1'},
+ {'name': 'dummy_vol2'}],
+ 'version': 'ontap_version'}, None),
+ 'metrocluster_post': (200,
+ {'job': {
+ 'uuid': 'fde79888-692a-11ea-80c2-005056b39fe7',
+ '_links': {
+ 'self': {
+ 'href': '/api/cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7'}}
+ }},
+ None),
+ 'metrocluster_return': (200,
+ {"_links": {
+ "self": {
+ "href": "/api/cluster/metrocluster/diagnostics"
+ }
+ }, "aggregate": {
+ "state": "ok",
+ "summary": {
+ "message": ""
+ }, "timestamp": "2020-07-22T16:42:51-07:00"
+ }}, None),
+ 'job': (200,
+ {
+ "uuid": "cca3d070-58c6-11ea-8c0c-005056826c14",
+ "description": "POST /api/cluster/metrocluster",
+ "state": "success",
+ "message": "There are not enough disks in Pool1.",
+ "code": 2432836,
+ "start_time": "2020-02-26T10:35:44-08:00",
+ "end_time": "2020-02-26T10:47:38-08:00",
+ "_links": {
+ "self": {
+ "href": "/api/cluster/jobs/cca3d070-58c6-11ea-8c0c-005056826c14"
+ }
+ }
+ }, None),
+ 'get_private_cli_subset_info': (200,
+ {
+ 'records': [
+ {'node': 'node1', 'check_type': 'type'},
+ {'node': 'node1', 'check_type': 'type'},
+ {'node': 'node1', 'check_type': 'type'}],
+ "num_records": 3}, None),
+ 'get_private_cli_vserver_security_file_directory_info': (
+ 200,
+ {
+ 'records': [
+ {'acls': ['junk', 'junk', 'DACL - ACEs', 'AT-user-0x123']},
+ {'node': 'node1', 'check_type': 'type'},
+ {'node': 'node1', 'check_type': 'type'}],
+ "num_records": 3}, None),
+ 'lun_info': (200, {'records': [{"serial_number": "z6CcD+SK5mPb"}]}, None),
+ 'volume_info': (200, {"uuid": "7882901a-1aef-11ec-a267-005056b30cfa"}, None),
+ 'svm_uuid': (200, {"records": [{"uuid": "test_uuid"}], "num_records": 1}, None),
+ 'get_uuid_policy_id_export_policy': (
+ 200,
+ {
+ "records": [{
+ "svm": {
+ "uuid": "uuid",
+ "name": "svm"},
+ "id": 123,
+ "name": "ansible"
+ }],
+ "num_records": 1}, None),
+ 'vscan_on_access_policies': (
+ 200, {"records": [
+ {
+ "name": "on-access-test",
+ "mandatory": True,
+ "scope": {
+ "scan_readonly_volumes": True,
+ "exclude_paths": [
+ "\\dir1\\dir2\\name",
+ "\\vol\\a b",
+ "\\vol\\a,b\\"
+ ],
+ "scan_without_extension": True,
+ "include_extensions": [
+ "mp*",
+ "txt"
+ ],
+ "exclude_extensions": [
+ "mp*",
+ "txt"
+ ],
+ "only_execute_access": True,
+ "max_file_size": "2147483648"
+ },
+ "enabled": True
+ }
+ ]}, None
+ ),
+ 'vscan_on_demand_policies': (
+ 200, {"records": [
+ {
+ "log_path": "/vol0/report_dir",
+ "scan_paths": [
+ "/vol1/",
+ "/vol2/cifs/"
+ ],
+ "name": "task-1",
+ "svm": {
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "scope": {
+ "exclude_paths": [
+ "/vol1/cold-files/",
+ "/vol1/cifs/names"
+ ],
+ "scan_without_extension": True,
+ "include_extensions": [
+ "vmdk",
+ "mp*"
+ ],
+ "exclude_extensions": [
+ "mp3",
+ "mp4"
+ ],
+ "max_file_size": "10737418240"
+ },
+ "schedule": {
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ "name": "weekly",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }
+ }
+ ]}, None
+ ),
+ 'vscan_scanner_pools': (
+ 200, {"records": [
+ {
+ "cluster": {
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ "name": "cluster1",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ },
+ "name": "scanner-1",
+ "servers": [
+ "1.1.1.1",
+ "10.72.204.27",
+ "vmwin204-27.fsct.nb"
+ ],
+ "privileged_users": [
+ "cifs\\u1",
+ "cifs\\u2"
+ ],
+ "svm": {
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "role": "primary"
+ }
+ ]}, None
+ )
+})
+
+ALL_SUBSETS = ['application/applications',
+ 'application/consistency-groups',
+ 'application/templates',
+ 'cloud/targets',
+ 'cluster',
+ 'cluster/chassis',
+ 'cluster/counter/tables',
+ 'cluster/fireware/history',
+ 'cluster/jobs',
+ 'cluster/licensing/capacity-pools',
+ 'cluster/licensing/license-managers',
+ 'cluster/licensing/licenses',
+ 'cluster/mediators',
+ 'cluster/metrics',
+ 'cluster/metrocluster',
+ 'cluster/metrocluster/diagnostics',
+ 'cluster/metrocluster/dr-groups',
+ 'cluster/metrocluster/interconnects',
+ 'cluster/metrocluster/nodes',
+ 'cluster/metrocluster/operations',
+ 'cluster/metrocluster/svms',
+ 'cluster/nodes',
+ 'cluster/ntp/keys',
+ 'cluster/ntp/servers',
+ 'cluster/peers',
+ 'cluster/schedules',
+ 'cluster/sensors',
+ 'cluster/software',
+ 'cluster/software/download',
+ 'cluster/software/history',
+ 'cluster/software/packages',
+ 'cluster/web',
+ 'name-services/cache/group-membership/settings',
+ 'name-services/cache/host/settings',
+ 'name-services/cache/netgroup/settings',
+ 'name-services/cache/setting',
+ 'name-services/cache/unix-group/settings',
+ 'name-services/dns',
+ 'name-services/ldap',
+ 'name-services/ldap-schemas',
+ 'name-services/local-hosts',
+ 'name-services/name-mappings',
+ 'name-services/nis',
+ 'name-services/unix-groups',
+ 'name-services/unix-users',
+ 'network/ethernet/broadcast-domains',
+ 'network/ethernet/ports',
+ 'network/ethernet/switch/ports',
+ 'network/ethernet/switches',
+ 'network/fc/fabrics',
+ 'network/fc/interfaces',
+ 'network/fc/logins',
+ 'network/fc/ports',
+ 'network/fc/wwpn-aliases',
+ 'network/http-proxy',
+ 'network/ip/bgp/peer-groups',
+ 'network/ip/interfaces',
+ 'network/ip/routes',
+ 'network/ip/service-policies',
+ 'network/ip/subnets',
+ 'network/ipspaces',
+ 'private/support/alerts',
+ 'protocols/active-directory',
+ 'protocols/audit',
+ 'protocols/cifs/connections',
+ 'protocols/cifs/domains',
+ 'protocols/cifs/group-policies',
+ 'protocols/cifs/home-directory/search-paths',
+ 'protocols/cifs/local-groups',
+ 'protocols/cifs/local-users',
+ 'protocols/cifs/netbios',
+ 'protocols/cifs/services',
+ 'protocols/cifs/session/files',
+ 'protocols/cifs/sessions',
+ 'protocols/cifs/shadow-copies',
+ 'protocols/cifs/shadowcopy-sets',
+ 'protocols/cifs/shares',
+ 'protocols/cifs/users-and-groups/privileges',
+ 'protocols/cifs/unix-symlink-mapping',
+ 'protocols/fpolicy',
+ 'protocols/locks',
+ 'protocols/ndmp',
+ 'protocols/ndmp/nodes',
+ 'protocols/ndmp/sessions',
+ 'protocols/ndmp/svms',
+ 'protocols/nfs/connected-clients',
+ 'protocols/nfs/connected-client-maps',
+ 'protocols/nfs/connected-client-settings',
+ 'protocols/nfs/export-policies',
+ 'protocols/nfs/kerberos/interfaces',
+ 'protocols/nfs/kerberos/realms',
+ 'protocols/nfs/services',
+ 'protocols/nvme/interfaces',
+ 'protocols/nvme/services',
+ 'protocols/nvme/subsystems',
+ 'protocols/nvme/subsystem-controllers',
+ 'protocols/nvme/subsystem-maps',
+ 'protocols/s3/buckets',
+ 'protocols/s3/services',
+ 'protocols/san/fcp/services',
+ 'protocols/san/igroups',
+ 'protocols/san/iscsi/credentials',
+ 'protocols/san/iscsi/services',
+ 'protocols/san/iscsi/sessions',
+ 'protocols/san/lun-maps',
+ 'protocols/san/portsets',
+ 'protocols/san/vvol-bindings',
+ 'protocols/vscan',
+ 'protocols/vscan/server-status',
+ 'security',
+ 'security/accounts',
+ 'security/anti-ransomware/suspects',
+ 'security/audit',
+ 'security/audit/destinations',
+ 'security/audit/messages',
+ 'security/authentication/cluster/ad-proxy',
+ 'security/authentication/cluster/ldap',
+ 'security/authentication/cluster/nis',
+ 'security/authentication/cluster/saml-sp',
+ 'security/authentication/publickeys',
+ 'security/aws-kms',
+ 'security/azure-key-vaults',
+ 'security/certificates',
+ 'security/gcp-kms',
+ 'security/ipsec',
+ 'security/ipsec/ca-certificates',
+ 'security/ipsec/policies',
+ 'security/ipsec/security-associations',
+ 'security/key-manager-configs',
+ 'security/key-managers',
+ 'security/key-stores',
+ 'security/login/messages',
+ 'security/multi-admin-verify',
+ 'security/multi-admin-verify/approval-groups',
+ 'security/multi-admin-verify/requests',
+ 'security/multi-admin-verify/rules',
+ 'security/roles',
+ 'security/ssh',
+ 'security/ssh/svms',
+ 'snapmirror/policies',
+ 'snapmirror/relationships',
+ 'storage/aggregates',
+ 'storage/bridges',
+ 'storage/cluster',
+ 'storage/disks',
+ 'storage/file/clone/split-loads',
+ 'storage/file/clone/split-status',
+ 'storage/file/clone/tokens',
+ 'storage/file/moves',
+ 'storage/flexcache/flexcaches',
+ 'storage/flexcache/origins',
+ 'storage/luns',
+ 'storage/namespaces',
+ 'storage/pools',
+ 'storage/ports',
+ 'storage/qos/policies',
+ 'storage/qos/workloads',
+ 'storage/qtrees',
+ 'storage/quota/reports',
+ 'storage/quota/rules',
+ 'storage/shelves',
+ 'storage/snaplock/audit-logs',
+ 'storage/snaplock/compliance-clocks',
+ 'storage/snaplock/event-retention/operations',
+ 'storage/snaplock/event-retention/policies',
+ 'storage/snaplock/file-fingerprints',
+ 'storage/snaplock/litigations',
+ 'storage/snapshot-policies',
+ 'storage/switches',
+ 'storage/tape-devices',
+ 'storage/volumes',
+ 'storage/volume-efficiency-policies',
+ 'support/autosupport',
+ 'support/autosupport/check',
+ 'support/autosupport/messages',
+ 'support/auto-update',
+ 'support/auto-update/configurations',
+ 'support/auto-update/updates',
+ 'support/configuration-backup',
+ 'support/configuration-backup/backups',
+ 'support/coredump/coredumps',
+ 'support/ems',
+ 'support/ems/destinations',
+ 'support/ems/events',
+ 'support/ems/filters',
+ 'support/ems/messages',
+ 'support/snmp',
+ 'support/snmp/traphosts',
+ 'support/snmp/users',
+ 'svm/migrations',
+ 'svm/peers',
+ 'svm/peer-permissions',
+ 'svm/svms']
+
+# Super important: Metrocluster doesn't call get_subset_info and makes 3 API calls instead of 1.
+# The metrocluster calls need to be in the correct place because the module returns the keys as a sorted list.
+ALL_RESPONSES = [
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'application/applications', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'application/templates', SRR['get_subset_info']),
+ ('GET', 'cloud/targets', SRR['get_subset_info']),
+ ('GET', 'cluster', SRR['get_subset_info']),
+ ('GET', 'cluster/chassis', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'cluster/jobs', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'cluster/licensing/licenses', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'cluster/metrics', SRR['get_subset_info']),
+ ('GET', 'cluster/metrocluster', SRR['get_subset_info']),
+ # MCC DIAGs
+ ('POST', 'cluster/metrocluster/diagnostics', SRR['metrocluster_post']),
+ ('GET', 'cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7', SRR['job']),
+ ('GET', 'cluster/metrocluster/diagnostics', SRR['metrocluster_return']),
+ # Back to normal
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'cluster/metrocluster/nodes', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'cluster/nodes', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'cluster/ntp/servers', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'support/ems/filters', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', '*', SRR['get_subset_info']),
+ ('GET', 'svm/peer-permissions', SRR['get_subset_info']),
+ ('GET', 'svm/peers', SRR['get_subset_info']),
+ ('GET', 'svm/svms', SRR['get_private_cli_subset_info']),
+]
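+# A '*' api entry appears to act as a wildcard in the mock framework, matching any GET in
+# sequence; only the order-sensitive calls (the metrocluster block and a few spot checks)
+# are spelled out explicitly above.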
+
+
+def set_default_args():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False
+ })
+
+
+def set_args_run_ontap_version_check():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False,
+ 'max_records': 1024,
+ 'gather_subset': ['volume_info']
+ })
+
+
+def set_args_run_metrocluster_diag():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False,
+ 'max_records': 1024,
+ 'gather_subset': ['cluster/metrocluster/diagnostics']
+ })
+
+
+def set_args_run_ontap_gather_facts_for_vserver_info():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False,
+ 'max_records': 1024,
+ 'gather_subset': ['vserver_info']
+ })
+
+
+def set_args_run_ontap_gather_facts_for_volume_info():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False,
+ 'max_records': 1024,
+ 'gather_subset': ['volume_info']
+ })
+
+
+def set_args_run_ontap_gather_facts_for_all_subsets():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False,
+ 'max_records': 1024,
+ 'gather_subset': ['all']
+ })
+
+
+def set_args_run_ontap_gather_facts_for_all_subsets_with_fields_section_pass():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False,
+ 'max_records': 1024,
+ 'fields': '*',
+ 'gather_subset': ['all']
+ })
+
+
+def set_args_run_ontap_gather_facts_for_all_subsets_with_fields_section_fail():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False,
+ 'max_records': 1024,
+ 'fields': ['uuid', 'name', 'node'],
+ 'gather_subset': ['all']
+ })
+
+
+def set_args_run_ontap_gather_facts_for_aggregate_info_with_fields_section_pass():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'fields': ['uuid', 'name', 'node'],
+ 'validate_certs': False,
+ 'max_records': 1024,
+ 'gather_subset': ['aggregate_info']
+ })
+
+
+def set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'https': True,
+ 'validate_certs': False,
+ 'max_records': 3,
+ 'gather_subset': ['volume_info']
+ })
+
+
+def test_run_ontap_version_check_for_9_6_pass():
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['get_subset_info']),
+ ])
+ assert not create_and_apply(ontap_rest_info_module, set_args_run_ontap_version_check())['changed']
+
+
+def test_run_ontap_version_check_for_10_2_pass():
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['get_subset_info']),
+ ])
+ assert not create_and_apply(ontap_rest_info_module, set_args_run_ontap_version_check())['changed']
+
+
+def test_run_ontap_version_check_for_9_2_fail():
+ ''' test that a failed ONTAP version check is reported as an error '''
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_fail']),
+ ])
+ assert call_main(my_main, set_args_run_ontap_version_check(),
+ fail=True)['msg'] == 'Error using REST for version, error: %s.' % SRR['validate_ontap_version_fail'][2]
+
+
+def test_version_warning_message():
+ gather_subset = ['cluster/metrocluster/diagnostics']
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ create_and_apply(ontap_rest_info_module, set_args_run_metrocluster_diag())
+ assert_warning_was_raised('The following subset have been removed from your query as they are not supported on ' +
+ 'your version of ONTAP cluster/metrocluster/diagnostics requires (9, 8), ')
+
+
+# metrocluster/diagnostics doesn't call get_subset_info and has 3 api calls instead of 1
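+# the mocked responses cover the POST that starts the diagnostics, the job polling GET, and the final GET of the results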
+def test_run_metrocluster_pass():
+ gather_subset = ['cluster/metrocluster/diagnostics']
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('POST', 'cluster/metrocluster/diagnostics', SRR['metrocluster_post']),
+ ('GET', 'cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7', SRR['job']),
+ ('GET', 'cluster/metrocluster/diagnostics', SRR['metrocluster_return']),
+ ])
+ assert set(create_and_apply(ontap_rest_info_module, set_args_run_metrocluster_diag())['ontap_info']) == set(
+ gather_subset)
+
+
+def test_run_ontap_gather_facts_for_vserver_info_pass():
+ gather_subset = ['svm/svms']
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'svm/svms', SRR['get_subset_info']),
+ ])
+ assert set(create_and_apply(ontap_rest_info_module, set_args_run_ontap_gather_facts_for_vserver_info())['ontap_info']) == set(gather_subset)
+
+
+def test_run_ontap_gather_facts_for_volume_info_pass():
+ gather_subset = ['storage/volumes']
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['get_subset_info']),
+ ])
+ assert set(create_and_apply(ontap_rest_info_module, set_args_run_ontap_gather_facts_for_volume_info())['ontap_info']) == set(gather_subset)
+
+
+def test_run_ontap_gather_facts_for_all_subsets_pass():
+ gather_subset = ALL_SUBSETS
+ register_responses(ALL_RESPONSES)
+ assert set(create_and_apply(ontap_rest_info_module, set_args_run_ontap_gather_facts_for_all_subsets())['ontap_info']) == set(gather_subset)
+
+
+def test_run_ontap_gather_facts_for_all_subsets_with_fields_section_pass():
+ gather_subset = ALL_SUBSETS
+ register_responses(ALL_RESPONSES)
+ assert set(create_and_apply(ontap_rest_info_module,
+ set_args_run_ontap_gather_facts_for_all_subsets_with_fields_section_pass()
+ )['ontap_info']) == set(gather_subset)
+
+
+def test_run_ontap_gather_facts_for_all_subsets_with_fields_section_fail():
+ error_message = "Error: fields: %s, only one subset will be allowed." \
+ % set_args_run_ontap_gather_facts_for_aggregate_info_with_fields_section_pass()['fields']
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ])
+ assert \
+ create_and_apply(ontap_rest_info_module,
+ set_args_run_ontap_gather_facts_for_all_subsets_with_fields_section_fail(),
+ fail=True
+ )['msg'] == error_message
+
+
+def test_run_ontap_gather_facts_for_aggregate_info_pass_with_fields_section_pass():
+ gather_subset = ['storage/aggregates']
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/aggregates', SRR['get_subset_info']),
+ ])
+ assert set(create_and_apply(ontap_rest_info_module,
+ set_args_run_ontap_gather_facts_for_aggregate_info_with_fields_section_pass()
+ )['ontap_info']) == set(gather_subset)
+
+
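+# pagination: the first storage/volumes response carries a next link, so a follow-up GET on /next_record_api is expected and the merged result holds 5 records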
+def test_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass():
+ total_records = 5
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['get_subset_info_with_next']),
+ ('GET', '/next_record_api', SRR['get_next_record']),
+ ])
+ assert create_and_apply(ontap_rest_info_module,
+ set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ )['ontap_info']['storage/volumes']['num_records'] == total_records
+
+
+def test_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass_python_keys():
+ args = set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ args['use_python_keys'] = True
+ args['state'] = 'info'
+ total_records = 5
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['get_subset_info_with_next']),
+ ('GET', '/next_record_api', SRR['get_next_record']),
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']['storage_volumes']['num_records'] == total_records
+
+
+def test_get_all_records_for_volume_info_with_parameters():
+ args = set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ args['use_python_keys'] = True
+ args['parameters'] = {'fields': '*'}
+ total_records = 5
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['get_subset_info_with_next']),
+ ('GET', '/next_record_api', SRR['get_next_record']),
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']['storage_volumes']['num_records'] == total_records
+
+
+def test_negative_error_on_get_next():
+ args = set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ args['use_python_keys'] = True
+ args['parameters'] = {'fields': '*'}
+ total_records = 5
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['get_subset_info_with_next']),
+ ('GET', '/next_record_api', SRR['generic_error']),
+ ])
+ assert create_and_apply(ontap_rest_info_module, args, fail=True)['msg'] == 'Expected error'
+
+
+def test_negative_bad_api():
+ args = set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ args['use_python_keys'] = True
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['error_invalid_api']),
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']['storage_volumes'] == 'Invalid API'
+
+
+def test_negative_error_no_aggr_recommendation():
+ args = set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ args['use_python_keys'] = True
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['error_no_aggr_recommendation']),
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']['storage_volumes'] == 'No recommendation can be made for this cluster'
+
+
+def test_negative_error_not_authorized():
+ args = set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ args['use_python_keys'] = True
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['error_user_is_not_authorized']),
+ ])
+ assert 'user is not authorized to make' in create_and_apply(ontap_rest_info_module, args, fail=True)['msg']
+
+
+def test_negative_error_no_processing():
+ args = set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ args['use_python_keys'] = True
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['error_no_processing']),
+ ])
+ assert create_and_apply(ontap_rest_info_module, args, fail=True)['msg']['message'] == 'error reported as is'
+
+
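+# strip_dacls is expected to keep only the entries after the 'DACL - ACEs' marker and split each 'access_type-user_or_group-...' string into a dict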
+def test_strip_dacls():
+ record = {}
+ response = {
+ 'records': [record]
+ }
+ assert ontap_rest_info_module.strip_dacls(response) is None
+ record['acls'] = []
+ assert ontap_rest_info_module.strip_dacls(response) is None
+ record['acls'] = ['junk', 'junk', 'DACL - ACEs']
+ assert ontap_rest_info_module.strip_dacls(response) == []
+ record['acls'] = ['junk', 'junk', 'DACL - ACEs', 'AT-user-0x123']
+ assert ontap_rest_info_module.strip_dacls(response) == [{'access_type': 'AT', 'user_or_group': 'user'}]
+ record['acls'] = ['junk', 'junk', 'DACL - ACEs', 'AT-user-0x123', 'AT2-group-0xABC']
+ assert ontap_rest_info_module.strip_dacls(response) == [{'access_type': 'AT', 'user_or_group': 'user'},
+ {'access_type': 'AT2', 'user_or_group': 'group'}]
+
+
+def test_private_cli_vserver_security_file_directory():
+ args = set_args_get_all_records_for_volume_info_to_check_next_api_call_functionality_pass()
+ args['gather_subset'] = 'private/cli/vserver/security/file-directory'
+ args['use_python_keys'] = True
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'private/cli/vserver/security/file-directory', SRR['get_private_cli_vserver_security_file_directory_info']),
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info'] == {
+ 'private_cli_vserver_security_file_directory': [{'access_type': 'AT', 'user_or_group': 'user'}]}
+
+
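+# get_ontap_subset_info_all looks up the subset's api_call and issues the GET with the requested fields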
+def test_get_ontap_subset_info_all_with_field():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'some/api', SRR['get_subset_info']),
+ ])
+ my_obj = create_module(ontap_rest_info_module, set_default_args())
+ subset_info = {'subset': {'api_call': 'some/api'}}
+ assert my_obj.get_ontap_subset_info_all('subset', 'fields', subset_info)['num_records'] == 3
+
+
+def test_negative_get_ontap_subset_info_all_bad_subset():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ my_obj = create_module(ontap_rest_info_module, set_default_args())
+ msg = 'Specified subset bad_subset is not found, supported subsets are []'
+ assert expect_and_capture_ansible_exception(my_obj.get_ontap_subset_info_all, 'fail', 'bad_subset', None, {})['msg'] == msg
+
+
+def test_demo_subset():
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'cluster/software', SRR['get_subset_info']),
+ ('GET', 'svm/svms', SRR['get_subset_info']),
+ ('GET', 'cluster/nodes', SRR['get_subset_info']),
+ ])
+ assert 'cluster/nodes' in call_main(my_main, set_default_args(), {'gather_subset': 'demo'})['ontap_info']
+
+
+def test_subset_with_default_fields():
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/aggregates', SRR['get_subset_info']),
+ ])
+ assert 'storage/aggregates' in \
+ create_and_apply(ontap_rest_info_module, set_default_args(), {'gather_subset': 'aggr_efficiency_info'})[
+ 'ontap_info']
+
+
+def test_negative_error_on_post():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('POST', 'api', SRR['generic_error']),
+ ])
+ assert create_module(ontap_rest_info_module, set_default_args()).run_post({'api_call': 'api'}) is None
+
+
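+# the POST succeeds but every job GET fails; the initial error and the three retries are joined into a single failure message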
+@patch('time.sleep')
+def test_negative_error_on_wait_after_post(sleep_mock):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('POST', 'api', SRR['metrocluster_post']),
+ ('GET', 'cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7', SRR['generic_error']),
+ ('GET', 'cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7', SRR['generic_error']), # retries
+ ('GET', 'cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7', SRR['generic_error']),
+ ('GET', 'cluster/jobs/fde79888-692a-11ea-80c2-005056b39fe7', SRR['generic_error']),
+ ])
+ my_obj = create_module(ontap_rest_info_module, set_default_args())
+ assert expect_and_capture_ansible_exception(my_obj.run_post, 'fail', {'api_call': 'api'})['msg'] == ' - '.join(
+ ['Expected error'] * 4)
+
+
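+# subsets with an owning resource first resolve the parent (here the volume uuid from storage/volumes) before querying the child endpoint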
+def test_owning_resource_snapshot():
+ args = set_default_args()
+ args['gather_subset'] = 'storage/volumes/snapshots'
+ args['owning_resource'] = {'volume_name': 'vol1', 'svm_name': 'svm1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['volume_info']),
+ ('GET', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa/snapshots', SRR['volume_info'])
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']
+
+
+def test_owning_resource_snapshot_missing_1_resource():
+ args = set_default_args()
+ args['gather_subset'] = 'storage/volumes/snapshots'
+ args['owning_resource'] = {'volume_name': 'vol1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ])
+ msg = 'Error: volume_name, svm_name are required for storage/volumes/snapshots'
+ assert create_and_apply(ontap_rest_info_module, args, fail=True)['msg'] == msg
+
+
+def test_owning_resource_snapshot_missing_resource():
+ args = set_default_args()
+ args['gather_subset'] = 'storage/volumes/snapshots'
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ])
+ msg = 'Error: volume_name, svm_name are required for storage/volumes/snapshots'
+ assert create_and_apply(ontap_rest_info_module, args, fail=True)['msg'] == msg
+
+
+def test_owning_resource_snapshot_volume_not_found():
+ args = set_default_args()
+ args['gather_subset'] = 'storage/volumes/snapshots'
+ args['owning_resource'] = {'volume_name': 'vol1', 'svm_name': 'svm1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ])
+ msg = 'Could not find volume vol1 on SVM svm1'
+ assert create_and_apply(ontap_rest_info_module, args, fail=True)['msg'] == msg
+
+
+def test_owning_resource_vscan_on_access_policies():
+ args = set_default_args()
+ args['gather_subset'] = 'protocols/vscan/on-access-policies'
+ args['owning_resource'] = {'svm_name': 'svm1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/test_uuid/on-access-policies', SRR['vscan_on_access_policies'])
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']
+
+
+def test_owning_resource_vscan_on_demand_policies():
+ args = set_default_args()
+ args['gather_subset'] = 'protocols/vscan/on-demand-policies'
+ args['owning_resource'] = {'svm_name': 'svm1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/test_uuid/on-demand-policies', SRR['vscan_on_access_policies'])
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']
+
+
+def test_owning_resource_vscan_scanner_pools():
+ args = set_default_args()
+ args['gather_subset'] = 'protocols/vscan/scanner-pools'
+ args['owning_resource'] = {'svm_name': 'svm1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/test_uuid/scanner-pools', SRR['vscan_scanner_pools'])
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']
+
+
+def test_owning_resource_export_policies_rules():
+ args = set_default_args()
+ args['gather_subset'] = 'protocols/nfs/export-policies/rules'
+ args['owning_resource'] = {'policy_name': 'policy_name', 'svm_name': 'svm1', 'rule_index': '1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'protocols/nfs/export-policies', SRR['get_uuid_policy_id_export_policy']),
+ ('GET', 'protocols/nfs/export-policies/123/rules/1', SRR['get_uuid_policy_id_export_policy'])
+ ])
+ assert create_and_apply(ontap_rest_info_module, args)['ontap_info']
+
+
+def test_owning_resource_export_policies_rules_missing_resource():
+ args = set_default_args()
+ args['gather_subset'] = 'protocols/nfs/export-policies/rules'
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ])
+ msg = 'Error: policy_name, svm_name, rule_index are required for protocols/nfs/export-policies/rules'
+ assert create_and_apply(ontap_rest_info_module, args, fail=True)['msg'] == msg
+
+
+def test_owning_resource_export_policies_rules_missing_1_resource():
+ args = set_default_args()
+ args['gather_subset'] = 'protocols/nfs/export-policies/rules'
+ args['owning_resource'] = {'policy_name': 'policy_name', 'svm_name': 'svm1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ])
+ msg = 'Error: policy_name, svm_name, rule_index are required for protocols/nfs/export-policies/rules'
+ assert create_and_apply(ontap_rest_info_module, args, fail=True)['msg'] == msg
+
+
+def test_owning_resource_export_policies_rules_policy_not_found():
+ args = set_default_args()
+ args['gather_subset'] = 'protocols/nfs/export-policies/rules'
+ args['owning_resource'] = {'policy_name': 'policy_name', 'svm_name': 'svm1', 'rule_index': '1'}
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'protocols/nfs/export-policies', SRR['generic_error']),
+ ])
+ msg = 'Could not find export policy policy_name on SVM svm1'
+ assert create_and_apply(ontap_rest_info_module, args, fail=True)['msg'] == msg
+
+
+def test_lun_info_with_serial():
+ args = set_default_args()
+ args['gather_subset'] = 'storage/luns'
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/luns', SRR['lun_info']),
+ ])
+ info = create_and_apply(ontap_rest_info_module, args)
+ assert 'ontap_info' in info
+ assert 'storage/luns' in info['ontap_info']
+ assert 'records' in info['ontap_info']['storage/luns']
+ records = info['ontap_info']['storage/luns']['records']
+ assert records
+ lun_info = records[0]
+ print('INFO', lun_info)
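+ # serial_hex is the hex encoding of the ASCII serial_number, and naa_id prefixes it with the NetApp NAA identifier 'naa.600a0980'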
+ assert lun_info['serial_number'] == 'z6CcD+SK5mPb'
+ assert lun_info['serial_hex'] == '7a364363442b534b356d5062'
+ assert lun_info['naa_id'] == 'naa.600a0980' + '7a364363442b534b356d5062'
+
+
+def test_ignore_api_errors():
+ args = set_default_args()
+ args['gather_subset'] = 'storage/luns'
+ args['ignore_api_errors'] = ['something', 'Expected error']
+ args['fields'] = ['**']
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ('GET', 'storage/luns', SRR['error_record']),
+ ])
+ info = create_and_apply(ontap_rest_info_module, args)
+ assert 'ontap_info' in info
+ assert 'storage/luns' in info['ontap_info']
+ assert 'error' in info['ontap_info']['storage/luns']
+ error = info['ontap_info']['storage/luns']['error']
+ assert error
+ assert error['code'] == 6
+ assert error['message'] == 'Expected error'
+ print_warnings()
+ assert_warning_was_raised('Using ** can put an extra load on the system and should not be used in production')
+
+
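+# private_cli_fields returns the comma separated field list for private CLI endpoints: a per-API default unless the user supplied 'fields'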
+def test_private_cli_fields():
+ register_responses([
+ ('GET', 'cluster', SRR['validate_ontap_version_pass']),
+ ])
+ args = set_default_args()
+ my_obj = create_module(ontap_rest_info_module, args)
+ error = 'Internal error, no field for unknown_api'
+ assert error in expect_and_capture_ansible_exception(my_obj.private_cli_fields, 'fail', 'unknown_api')['msg']
+ assert my_obj.private_cli_fields('private/cli/vserver/security/file-directory') == 'acls'
+ assert my_obj.private_cli_fields('support/autosupport/check') == 'node,corrective-action,status,error-detail,check-type,check-category'
+ my_obj.parameters['fields'] = ['f1', 'f2']
+ assert my_obj.private_cli_fields('private/cli/vserver/security/file-directory') == 'f1,f2'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_restit.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_restit.py
new file mode 100644
index 000000000..89289386a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_restit.py
@@ -0,0 +1,346 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_restit '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, call
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_restit \
+ import NetAppONTAPRestAPI as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
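+# canned responses for the mocked send_request, generally (status code, json payload, error message) tuples keyed by a mnemonic name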
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy_9_7_0')), None),
+ 'is_rest_95': (200, dict(version=dict(generation=9, major=5, minor=0, full='dummy_9_5_0')), None),
+ 'is_rest_96': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy_9_6_0')), None),
+ 'is_rest_97': (200, dict(version=dict(generation=9, major=7, minor=0, full='dummy_9_7_0')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': ({}, None, None),
+ 'zero_record': (200, {'records': []}, None),
+ 'job_id_record': (
+ 200, {
+ 'job': {
+ 'uuid': '94b6e6a7-d426-11eb-ac81-00505690980f',
+ '_links': {'self': {'href': '/api/cluster/jobs/94b6e6a7-d426-11eb-ac81-00505690980f'}}},
+ 'cli_output': ' Use the "job show -id 2379" command to view the status of this operation.'}, None),
+ 'job_response_record': (
+ 200, {
+ "uuid": "f03ccbb6-d8bb-11eb-ac81-00505690980f",
+ "description": "File Directory Security Apply Job",
+ "state": "success",
+ "message": "Complete: Operation completed successfully. File ACLs modified using policy \"policy1\" on Vserver \"GBSMNAS80LD\". File count: 0. [0]",
+ "code": 0,
+ "start_time": "2021-06-29T05:25:26-04:00",
+ "end_time": "2021-06-29T05:25:26-04:00"
+ }, None),
+ 'job_response_record_running': (
+ 200, {
+ "uuid": "f03ccbb6-d8bb-11eb-ac81-00505690980f",
+ "description": "File Directory Security Apply Job",
+ "state": "running",
+ "message": "Complete: Operation completed successfully. File ACLs modified using policy \"policy1\" on Vserver \"GBSMNAS80LD\". File count: 0. [0]",
+ "code": 0,
+ "start_time": "2021-06-29T05:25:26-04:00",
+ "end_time": "2021-06-29T05:25:26-04:00"
+ }, None),
+ 'job_response_record_failure': (
+ 200, {
+ "uuid": "f03ccbb6-d8bb-11eb-ac81-00505690980f",
+ "description": "File Directory Security Apply Job",
+ "state": "failure",
+ "message": "Forcing some error for UT.",
+ "code": 0,
+ "start_time": "2021-06-29T05:25:26-04:00",
+ "end_time": "2021-06-29T05:25:26-04:00"
+ }, None),
+ 'generic_error': (500, None, "Expected error"),
+ 'rest_error': (400, None, {'message': '-error_message-', 'code': '-error_code-'}),
+ 'end_of_sequence': (None, None, "Unexpected call to send_request"),
+}
+
+
+def set_default_args(use_rest='auto'):
+ hostname = '10.10.10.10'
+ username = 'admin'
+ password = 'password'
+ api = 'abc'
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'api': api,
+ 'use_rest': use_rest
+ })
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_run_default_get(mock_request, patch_ansible):
+ ''' if no method is given, GET is the default '''
+ args = dict(set_default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 1
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_run_any(mock_request, patch_ansible):
+ ''' We don't validate the method name, so ANYthing goes '''
+ args = dict(set_default_args())
+ args['method'] = 'ANY'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ args['files'] = {'fkey1': 'fitem1', 'fkey2': 'fitem2'}
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 1
+ headers = my_obj.rest_api.build_headers(accept='application/json')
+ expected_call = call('ANY', 'abc', args['query'], args['body'], headers, args['files'])
+ assert expected_call in mock_request.mock_calls
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_run_any_rest_error(mock_request, patch_ansible):
+ ''' We don't validate the method name, so ANYthing goes '''
+ args = dict(set_default_args())
+ args['method'] = 'ANY'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['rest_error'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "Error when calling 'abc': check error_message and error_code for details."
+ assert msg == exc.value.args[0]['msg']
+ assert '-error_message-' == exc.value.args[0]['error_message']
+ assert '-error_code-' == exc.value.args[0]['error_code']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_run_any_other_error(mock_request, patch_ansible):
+ ''' We don't validate the method name, so ANYthing goes '''
+ args = dict(set_default_args())
+ args['method'] = 'ANY'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "Error when calling 'abc': Expected error"
+ assert msg == exc.value.args[0]['msg']
+ assert 'Expected error' == exc.value.args[0]['error_message']
+ assert exc.value.args[0]['error_code'] is None
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_run_post_async_no_job(mock_request, patch_ansible):
+ ''' POST async, but returns immediately '''
+ args = dict(set_default_args())
+ args['method'] = 'POST'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ args['wait_for_completion'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 1
+ headers = my_obj.rest_api.build_headers(accept='application/json')
+ args['query'].update({'return_timeout': 30})
+ expected_call = call('POST', 'abc', args['query'], json=args['body'], headers=headers, files=None)
+ assert expected_call in mock_request.mock_calls
+
+
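+# when the POST returns a job record, the module polls cluster/jobs/<uuid> and only reports success once the job reports success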
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_run_post_async_with_job(mock_request, patch_ansible):
+ ''' POST async, with a job to poll until it completes '''
+ args = dict(set_default_args())
+ args['method'] = 'POST'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ args['wait_for_completion'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['job_id_record'],
+ SRR['job_response_record'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
+ headers = my_obj.rest_api.build_headers(accept='application/json')
+ args['query'].update({'return_timeout': 30})
+ expected_call = call('POST', 'abc', args['query'], json=args['body'], headers=headers, files=None)
+ assert expected_call in mock_request.mock_calls
+
+
+# patch time to not wait between job retries
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_run_patch_async_with_job_loop(mock_request, mock_sleep, patch_ansible):
+ ''' PATCH async, polling the job until it leaves the running state '''
+ args = dict(set_default_args())
+ args['method'] = 'PATCH'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ args['wait_for_completion'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['job_id_record'],
+ SRR['job_response_record_running'],
+ SRR['job_response_record'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+ headers = my_obj.rest_api.build_headers(accept='application/json')
+ args['query'].update({'return_timeout': 30})
+ expected_call = call('PATCH', 'abc', args['query'], json=args['body'], headers=headers, files=None)
+ assert expected_call in mock_request.mock_calls
+
+
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_run_negative_delete(mock_request, mock_sleep, patch_ansible):
+ ''' DELETE async, with a job that ends in failure '''
+ args = dict(set_default_args())
+ args['method'] = 'DELETE'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ args['wait_for_completion'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['job_id_record'],
+ SRR['job_response_record_running'],
+ SRR['job_response_record_failure'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "Error when calling 'abc': Forcing some error for UT."
+ assert msg == exc.value.args[0]['msg']
+ assert 'Forcing some error for UT.' == exc.value.args[0]['error_message']
+ assert exc.value.args[0]['error_code'] is None
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+ headers = my_obj.rest_api.build_headers(accept='application/json')
+ args['query'].update({'return_timeout': 30})
+ expected_call = call('DELETE', 'abc', args['query'], json=None, headers=headers)
+ assert expected_call in mock_request.mock_calls
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_run_any_async(mock_request, patch_ansible):
+ ''' We don't validate the method name, so ANYthing goes '''
+ args = dict(set_default_args())
+ args['method'] = 'ANY'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ args['files'] = {'fkey1': 'fitem1', 'fkey2': 'fitem2'}
+ args['wait_for_completion'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 1
+ headers = my_obj.rest_api.build_headers(accept='application/json')
+ expected_call = call('ANY', 'abc', args['query'], args['body'], headers, args['files'])
+ assert expected_call in mock_request.mock_calls
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_run_main(mock_request, patch_ansible):
+ ''' exercise the main() entry point with an arbitrary method '''
+ args = dict(set_default_args())
+ args['method'] = 'ANY'
+ args['body'] = {'bkey1': 'bitem1', 'bkey2': 'bitem2'}
+ args['query'] = {'qkey1': 'qitem1', 'qkey2': 'qitem2'}
+ args['wait_for_completion'] = True
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_main()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 1
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_build_headers(mock_request, patch_ansible):
+ ''' validate headers for the default, HAL, and custom accept settings '''
+ args = dict(set_default_args())
+ set_module_args(args)
+ my_obj = my_module()
+ headers = my_obj.build_headers()
+ # TODO: in UT (and only in UT) module._name is not set properly. It shows as basic.py instead of 'na_ontap_restit'
+ assert headers == {'X-Dot-Client-App': 'basic.py/%s' % netapp_utils.COLLECTION_VERSION, 'accept': 'application/json'}
+ args['hal_linking'] = True
+ set_module_args(args)
+ my_obj = my_module()
+ headers = my_obj.build_headers()
+ assert headers == {'X-Dot-Client-App': 'basic.py/%s' % netapp_utils.COLLECTION_VERSION, 'accept': 'application/hal+json'}
+ # Accept header
+ args['accept_header'] = "multipart/form-data"
+ set_module_args(args)
+ my_obj = my_module()
+ headers = my_obj.build_headers()
+ assert headers == {'X-Dot-Client-App': 'basic.py/%s' % netapp_utils.COLLECTION_VERSION, 'accept': 'multipart/form-data'}
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_buckets.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_buckets.py
new file mode 100644
index 000000000..2e15239da
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_buckets.py
@@ -0,0 +1,739 @@
+# (c) 2022-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, \
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_s3_buckets \
+ import NetAppOntapS3Buckets as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'nas_s3_bucket': (200, {"records": [{
+ 'comment': '',
+ 'name': 'carchi-test-bucket1',
+ 'nas_path': '/',
+ 'policy': {
+ 'statements': [
+ {
+ 'actions': ['GetObject', 'PutObject', 'DeleteObject', 'ListBucket'],
+ 'conditions': [
+ {'operator': 'ip_address', 'source_ips': ['1.1.1.1/32', '1.2.2.0/24']},
+ ],
+ 'effect': 'deny',
+ 'principals': [],
+ 'resources': ['carchi-test-bucket1', 'carchi-test-bucket1/*'],
+ 'sid': 1
+ }
+ ]
+ },
+ 'svm': {
+ 'name': 'ansibleSVM',
+ 'uuid': '685bd228'
+ },
+ 'type': 'nas',
+ 'uuid': '3e5c4ac8'}], "num_records": 1}, None),
+ 'nas_s3_bucket_modify': (200, {"records": [{
+ 'comment': '',
+ 'name': 'carchi-test-bucket1',
+ 'nas_path': '/',
+ 'policy': {'statements': []},
+ 'svm': {
+ 'name': 'ansibleSVM',
+ 'uuid': '685bd228'
+ },
+ 'type': 'nas',
+ 'uuid': '3e5c4ac8'}], "num_records": 1}, None),
+ 's3_bucket_more_policy': (200, {"records": [{
+ 'comment': 'carchi8py was here again',
+ 'name': 'bucket1',
+ 'policy': {
+ 'statements': [
+ {
+ "sid": 1,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1/32", "1.2.2.0/24"]}],
+ "principals": ["user1", "user2"],
+ "resources": ["bucket1", "bucket1/*"]
+ },
+ {
+ "sid": 2,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1/32", "1.2.2.0/24"]}],
+ "principals": ["user1", "user2"],
+ "resources": ["bucket1", "bucket1/*"]
+ }
+ ]
+ },
+ 'qos_policy': {
+ 'max_throughput_iops': 100,
+ 'max_throughput_mbps': 150,
+ 'min_throughput_iops': 0,
+ 'min_throughput_mbps': 0,
+ 'name': 'ansibleSVM_auto_gen_policy_9be26687_2849_11ed_9696_005056b3b297',
+ 'uuid': '9be28517-2849-11ed-9696-005056b3b297'
+ },
+ 'size': 938860800,
+ 'svm': {'name': 'ansibleSVM', 'uuid': '969ansi97'},
+ 'uuid': '9bdefd59-2849-11ed-9696-005056b3b297',
+ 'type': 's3',
+ 'volume': {'uuid': '1cd8a442-86d1-11e0-abcd-123478563412'}}], "num_records": 1}, None),
+ 's3_bucket_without_condition': (200, {"records": [{
+ 'comment': 'carchi8py was here again',
+ 'name': 'bucket1',
+ 'policy': {
+ 'statements': [
+ {
+ "sid": 1,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "principals": ["user1", "user2"],
+ "resources": ["bucket1", "bucket1/*"]
+ },
+ {
+ "sid": 2,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "principals": ["user1", "user2"],
+ "resources": ["bucket1", "bucket1/*"]
+ }
+ ]
+ },
+ 'qos_policy': {
+ 'max_throughput_iops': 100,
+ 'max_throughput_mbps': 150,
+ 'min_throughput_iops': 0,
+ 'min_throughput_mbps': 0,
+ 'name': 'ansibleSVM_auto_gen_policy_9be26687_2849_11ed_9696_005056b3b297',
+ 'uuid': '9be28517-2849-11ed-9696-005056b3b297'
+ },
+ 'size': 938860800,
+ 'svm': {'name': 'ansibleSVM', 'uuid': '969ansi97'},
+ 'uuid': '9bdefd59-2849-11ed-9696-005056b3b297',
+ 'volume': {'uuid': '1cd8a442-86d1-11e0-abcd-123478563412'}}], "num_records": 1}, None),
+ 's3_bucket_9_10': (200, {
+ "logical_used_size": 0,
+ "uuid": "414b29a1-3b26-11e9-bd58-0050568ea055",
+ "size": 1677721600,
+ "protection_status": {"destination": {}},
+ "constituents_per_aggregate": 4,
+ "qos_policy": {
+ "max_throughput_iops": 10000,
+ "max_throughput_mbps": 500,
+ "name": "performance",
+ "min_throughput_iops": 2000,
+ "min_throughput_mbps": 500,
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ },
+ "policy": {
+ "statements": [
+ {
+ "sid": "FullAccessToUser1",
+ "resources": ["bucket1", "bucket1/*"],
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "allow",
+ "conditions": [
+ {
+ "operator": "ip-address",
+ "max_keys": ["1000"],
+ "delimiters": ["/"],
+ "source-ips": ["1.1.1.1", "1.2.2.0/24"],
+ "prefixes": ["pref"],
+ "usernames": ["user1"]
+ }
+ ],
+ "principals": ["user1", "group/grp1"]
+ }
+ ]
+ },
+ "storage_service_level": "value",
+ "audit_event_selector": {"access": "all", "permission": "all"},
+ "name": "bucket1",
+ "comment": "S3 bucket.",
+ "svm": {"name": "svm1", "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"},
+ "volume": {"uuid": "1cd8a442-86d1-11e0-abcd-123478563412"}
+ }, None),
+ 's3_bucket_9_8': (200, {
+ "logical_used_size": 0,
+ "uuid": "414b29a1-3b26-11e9-bd58-0050568ea055",
+ "size": 1677721600,
+ "protection_status": {"destination": {}},
+ "constituents_per_aggregate": 4,
+ "qos_policy": {
+ "max_throughput_iops": 10000,
+ "max_throughput_mbps": 500,
+ "name": "performance",
+ "min_throughput_iops": 2000,
+ "min_throughput_mbps": 500,
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ },
+ "policy": {
+ "statements": [
+ {
+ "sid": "FullAccessToUser1",
+ "resources": ["bucket1", "bucket1/*"],
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "allow",
+ "conditions": [
+ {
+ "operator": "ip-address",
+ "max_keys": ["1000"],
+ "delimiters": ["/"],
+ "source-ips": ["1.1.1.1", "1.2.2.0/24"],
+ "prefixes": ["pref"],
+ "usernames": ["user1"]
+ }
+ ],
+ "principals": ["user1", "group/grp1"]
+ }
+ ]
+ },
+ "storage_service_level": "value",
+ "name": "bucket1",
+ "comment": "S3 bucket.",
+ "svm": {"name": "svm1", "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"},
+ "volume": {"uuid": "1cd8a442-86d1-11e0-abcd-123478563412"}
+ }, None),
+ 'volume_info': (200, {
+ "aggregates": [{"name": "aggr1", "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"}],
+ }, None),
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'bucket1',
+ 'vserver': 'vserver'
+}
+
+POLICY_ARGS = {
+ "statements": [{
+ "sid": "FullAccessToUser1",
+ "resources": ["bucket1", "bucket1/*"],
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "allow",
+ "conditions": [
+ {
+ "operator": "ip_address",
+ "max_keys": ["1000"],
+ "delimiters": ["/"],
+ "source_ips": ["1.1.1.1", "1.2.2.0/24"],
+ "prefixes": ["pref"],
+ "usernames": ["user1"]
+ }
+ ],
+ "principals": ["user1", "group/grp1"]
+ }]
+}
+
+REAL_POLICY_ARGS = {
+ "statements": [{
+ "sid": "FullAccessToUser1",
+ "resources": ["bucket1", "bucket1/*"],
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "allow",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1", "1.2.2.0/24"]}],
+ "principals": ["user1", "group/grp1"]
+ }]
+}
+
+REAL_POLICY_WITH_NUM_ARGS = {
+ "statements": [{
+ "sid": 1,
+ "resources": ["bucket1", "bucket1/*"],
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "allow",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1", "1.2.2.0/24"]}],
+ "principals": ["user1", "group/grp1"]
+ }]
+}
+
+MODIFY_POLICY_ARGS = {
+ "statements": [{
+ "sid": "FullAccessToUser1",
+ "resources": ["bucket1", "bucket1/*"],
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "allow",
+ "conditions": [
+ {
+ "operator": "ip_address",
+ "max_keys": ["100"],
+ "delimiters": ["/"],
+ "source_ips": ["2.2.2.2", "1.2.2.0/24"],
+ "prefixes": ["pref"],
+ "usernames": ["user2"]
+ }
+ ],
+ "principals": ["user1", "group/grp1"]
+ }]
+}
+
+
+MULTIPLE_POLICY_STATEMENTS = {
+ "statements": [
+ {
+ "sid": 1,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1", "1.2.2.0/24"]}],
+ "principals": ["user1", "user2"],
+ "resources": ["*"]
+ },
+ {
+ "sid": 2,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1", "1.2.2.0/24"]}],
+ "principals": ["user1", "user2"],
+ "resources": ["*"]
+ }
+ ]
+}
+
+
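+# two statements sharing sid 1, used to check that duplicated statements still register as a change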
+SAME_POLICY_STATEMENTS = {
+ "statements": [
+ {
+ "sid": 1,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1", "1.2.2.0/24"]}],
+ "principals": ["user1", "user2"],
+ "resources": ["*"]
+ },
+ {
+ "sid": 1,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1", "1.2.2.0/24"]}],
+ "principals": ["user1", "user2"],
+ "resources": ["*"]
+ },
+ ]
+}
+
+
+MULTIPLE_POLICY_CONDITIONS = {
+ "statements": [
+ {
+ "sid": 1,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "conditions": [
+ {"operator": "ip_address", "source_ips": ["1.1.1.1", "1.2.2.0/24"]},
+ {"operator": "not_ip_address", "source_ips": ["2.1.1.1", "1.2.2.0/24"]}
+ ],
+ "principals": ["user1", "user2"],
+ "resources": ["*"]
+ },
+ {
+ "sid": 2,
+ "actions": ["GetObject", "PutObject", "DeleteObject", "ListBucket"],
+ "effect": "deny",
+ "conditions": [{"operator": "ip_address", "source_ips": ["1.1.1.1", "1.2.2.0/24"]}],
+ "principals": ["user1", "user2"],
+ "resources": ["*"]
+ }
+ ]
+}
+
+
+NAS_S3_BUCKET = {
+ 'comment': '',
+ 'name': 'carchi-test-bucket1',
+ 'nas_path': '/',
+ 'policy': {
+ 'statements': [
+ {
+ 'actions': ['GetObject', 'PutObject', 'DeleteObject', 'ListBucket'],
+ 'conditions': [{'operator': 'ip_address', 'source_ips': ['1.1.1.1/32', '1.2.2.0/24']}],
+ 'effect': 'deny',
+ 'principals': [],
+ 'resources': ['carchi-test-bucket1', 'carchi-test-bucket1/*'],
+ 'sid': 1
+ }
+ ]
+ },
+ 'vserver': 'ansibleSVM',
+ 'type': 'nas'
+}
+
+
+QOS_ARGS = {
+ "max_throughput_iops": 10000,
+ "max_throughput_mbps": 500,
+ "name": "performance",
+ "min_throughput_iops": 2000,
+ "min_throughput_mbps": 500,
+}
+
+MODIFY_QOS_ARGS = {
+ "max_throughput_iops": 20000,
+ "max_throughput_mbps": 400,
+ "name": "performance",
+ "min_throughput_iops": 3000,
+ "min_throughput_mbps": 400,
+}
+
+AUDIT_EVENT = {
+ "access": "all",
+ "permission": "all"
+}
+
+MODIFY_AUDIT_EVENT = {
+ "access": "read",
+ "permission": "allow"
+}
+
+
+def test_low_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97'])
+ ])
+ error = create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error: na_ontap_s3_bucket only supports REST, and requires ONTAP 9.8.0 or later. Found: 9.7.0.'
+ assert msg in error
+
+
+def test_get_s3_bucket_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_s3_bucket() is None
+
+
+def test_get_s3_bucket_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching S3 bucket bucket1: calling: protocols/s3/buckets: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_s3_bucket, 'fail')['msg']
+
+
+def test_get_s3_bucket_9_8():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_8'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_s3_bucket() is not None
+
+
+def test_get_s3_bucket_9_10():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_10'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_s3_bucket() is not None
+
+
+def test_create_s3_bucket_9_8():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'protocols/s3/buckets', SRR['empty_records']),
+ ('POST', 'protocols/s3/buckets', SRR['empty_good'])
+ ])
+ module_args = {'comment': 'carchi8py was here',
+ 'aggregates': ['aggr1'],
+ 'constituents_per_aggregate': 4,
+ 'size': 838860800,
+ 'policy': POLICY_ARGS,
+ 'qos_policy': QOS_ARGS}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_s3_bucket_9_10_and_9_12():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['empty_records']),
+ ('POST', 'protocols/s3/buckets', SRR['empty_good']),
+ # create with type
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/s3/buckets', SRR['empty_records']),
+ ('POST', 'protocols/s3/buckets', SRR['empty_good'])
+ ])
+ module_args = {'comment': 'carchi8py was here',
+ 'aggregates': ['aggr1'],
+ 'constituents_per_aggregate': 4,
+ 'size': 838860800,
+ 'policy': POLICY_ARGS,
+ 'qos_policy': QOS_ARGS,
+ 'audit_event_selector': AUDIT_EVENT}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ module_args['type'] = 's3'
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
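+# walk the nas bucket through create, idempotent re-apply, emptying the policy via modify, another idempotent check, and delete, mutating NAS_S3_BUCKET between calls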
+def test_s3_nas_bucket_create_modify_delete():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/s3/buckets', SRR['empty_records']),
+ ('POST', 'protocols/s3/buckets', SRR['success']),
+ # idempotent check
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/s3/buckets', SRR['nas_s3_bucket']),
+ # modify empty policy
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/s3/buckets', SRR['nas_s3_bucket']),
+ ('PATCH', 'protocols/s3/buckets/685bd228/3e5c4ac8', SRR['success']),
+ # idempotent check
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/s3/buckets', SRR['nas_s3_bucket_modify']),
+ # delete nas bucket
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/s3/buckets', SRR['nas_s3_bucket_modify']),
+ ('DELETE', 'protocols/s3/buckets/685bd228/3e5c4ac8', SRR['success'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, NAS_S3_BUCKET)['changed']
+ assert create_and_apply(my_module, DEFAULT_ARGS, NAS_S3_BUCKET)['changed'] is False
+ NAS_S3_BUCKET['policy']['statements'] = []
+ assert create_and_apply(my_module, DEFAULT_ARGS, NAS_S3_BUCKET)['changed']
+ assert create_and_apply(my_module, DEFAULT_ARGS, NAS_S3_BUCKET)['changed'] is False
+ NAS_S3_BUCKET['state'] = 'absent'
+ assert create_and_apply(my_module, DEFAULT_ARGS, NAS_S3_BUCKET)['changed']
+
+
+def test_modify_s3_bucket_type_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_more_policy'])
+ ])
+ assert 'Error: cannot modify bucket type.' in create_and_apply(my_module, DEFAULT_ARGS, {'type': 'nas'}, fail=True)['msg']
+
+
+def test_create_with_real_policy_s3_bucket_9_10():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['empty_records']),
+ ('POST', 'protocols/s3/buckets', SRR['empty_good'])
+ ])
+ module_args = {'comment': 'carchi8py was here',
+ 'aggregates': ['aggr1'],
+ 'constituents_per_aggregate': 4,
+ 'size': 838860800,
+ 'policy': REAL_POLICY_ARGS,
+ 'qos_policy': QOS_ARGS,
+ 'audit_event_selector': AUDIT_EVENT}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_with_real_policy_with_sid_as_number_s3_bucket_9_10():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['empty_records']),
+ ('POST', 'protocols/s3/buckets', SRR['empty_good'])
+ ])
+ module_args = {'comment': 'carchi8py was here',
+ 'aggregates': ['aggr1'],
+ 'constituents_per_aggregate': 4,
+ 'size': 838860800,
+ 'policy': REAL_POLICY_WITH_NUM_ARGS,
+ 'qos_policy': QOS_ARGS,
+ 'audit_event_selector': AUDIT_EVENT}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_s3_bucket_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('POST', 'protocols/s3/buckets', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'carchi8py was here'
+ my_obj.parameters['aggregates'] = ['aggr1']
+ my_obj.parameters['constituents_per_aggregate'] = 4
+ my_obj.parameters['size'] = 838860800
+ error = expect_and_capture_ansible_exception(my_obj.create_s3_bucket, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating S3 bucket bucket1: calling: protocols/s3/buckets: got Expected error.' == error
+
+
+def test_delete_s3_bucket():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_10']),
+ ('DELETE', 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055',
+ SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_s3_bucket_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('DELETE', 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055',
+ SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.uuid = '414b29a1-3b26-11e9-bd58-0050568ea055'
+ my_obj.svm_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ error = expect_and_capture_ansible_exception(my_obj.delete_s3_bucket, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting S3 bucket bucket1: calling: ' \
+ 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055: got Expected error.' == error
+
+
+def test_modify_s3_bucket_9_8():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_8']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ('PATCH', 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055',
+ SRR['empty_good'])
+ ])
+ module_args = {'comment': 'carchi8py was here',
+ 'size': 943718400,
+ 'policy': MODIFY_POLICY_ARGS,
+ 'qos_policy': MODIFY_QOS_ARGS}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_s3_bucket_9_10():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_10']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ('PATCH', 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055',
+ SRR['empty_good'])
+ ])
+ module_args = {'comment': 'carchi8py was here',
+ 'size': 943718400,
+ 'policy': MODIFY_POLICY_ARGS,
+ 'qos_policy': MODIFY_QOS_ARGS,
+ 'audit_event_selector': MODIFY_AUDIT_EVENT}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_s3_bucket_policy_statements():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_10']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ('PATCH', 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055',
+ SRR['empty_good']),
+ # add multiple statements.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_more_policy']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ # try to modify with identical statements.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_more_policy']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ('PATCH', 'protocols/s3/buckets/969ansi97/9bdefd59-2849-11ed-9696-005056b3b297', SRR['empty_good']),
+ # empty policy statements.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_10']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ('PATCH', 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055',
+ SRR['empty_good'])
+ ])
+ module_args = {'policy': MULTIPLE_POLICY_STATEMENTS}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ module_args = {'policy': SAME_POLICY_STATEMENTS}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+    assert create_and_apply(my_module, DEFAULT_ARGS, {'policy': {'statements': []}})['changed']
+
+
+def test_modify_s3_bucket_policy_statements_conditions():
+ register_responses([
+        # modify when the desired statements have conditions and the current statement's conditions are None.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_without_condition']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ('PATCH', 'protocols/s3/buckets/969ansi97/9bdefd59-2849-11ed-9696-005056b3b297', SRR['empty_good']),
+        # empty the policy statement conditions.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_more_policy']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ('PATCH', 'protocols/s3/buckets/969ansi97/9bdefd59-2849-11ed-9696-005056b3b297', SRR['empty_good']),
+ # add multiple conditions.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_more_policy']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ('PATCH', 'protocols/s3/buckets/969ansi97/9bdefd59-2849-11ed-9696-005056b3b297', SRR['empty_good'])
+ ])
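+    # The three labeled response groups above are consumed, in order, by the three create_and_apply() calls below.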
+ module_args = {'policy': MULTIPLE_POLICY_STATEMENTS}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ module_args = {'policy': MULTIPLE_POLICY_STATEMENTS.copy()}
+ module_args['policy']['statements'][0]['conditions'] = []
+ module_args['policy']['statements'][1]['conditions'] = []
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ module_args = {'policy': MULTIPLE_POLICY_CONDITIONS}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_when_try_set_empty_dict_to_policy():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ module_args = {'policy': {'statements': [{}]}}
+ assert 'cannot set empty dict' in create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_modify_s3_bucket_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('PATCH', 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055',
+ SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'carchi8py was here'
+ my_obj.parameters['size'] = 943718400
+ current = {'comment': 'carchi8py was here', 'size': 943718400}
+ my_obj.uuid = '414b29a1-3b26-11e9-bd58-0050568ea055'
+ my_obj.svm_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ error = expect_and_capture_ansible_exception(my_obj.modify_s3_bucket, 'fail', current)['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying S3 bucket bucket1: calling: ' \
+ 'protocols/s3/buckets/02c9e252-41be-11e9-81d5-00a0986138f7/414b29a1-3b26-11e9-bd58-0050568ea055: got Expected error.' == error
+
+
+def test_new_aggr_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_8']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['volume_info']),
+ ])
+ module_args = {'aggregates': ['aggr2']}
+ error = 'Aggregates cannot be modified for S3 bucket bucket1'
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_volume_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'protocols/s3/buckets', SRR['s3_bucket_9_8']),
+ ('GET', 'storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412', SRR['generic_error']),
+ ])
+ module_args = {'aggregates': ['aggr2']}
+ error = 'calling: storage/volumes/1cd8a442-86d1-11e0-abcd-123478563412: got Expected error.'
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_groups.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_groups.py
new file mode 100644
index 000000000..6b204eadd
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_groups.py
@@ -0,0 +1,319 @@
+# (c) 2022-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_s3_groups \
+ import NetAppOntapS3Groups as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 's3_group_no_user_policy': (200, {
+ "records": [
+ {
+ "comment": "Admin group",
+ "name": "carchi8py_group",
+ "id": "5",
+ "svm": {
+ "name": "svm1",
+ "uuid": "e3cb5c7f-cd20"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None),
+ 's3_group': (200, {
+ "records": [
+ {
+ "comment": "Admin group",
+ "name": "carchi8py_group",
+ "users": [
+ {
+ "name": "carchi8py",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ }
+ ],
+ "policies": [
+ {
+ "name": "my_policy",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ }
+ ],
+ "id": "5",
+ "svm": {
+ "name": "svm1",
+ "uuid": "e3cb5c7f-cd20"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None),
+ 's3_group2': (200, {
+ "records": [
+ {
+ "comment": "Admin group",
+ "name": "carchi8py_group",
+ "users": [
+ {
+ "name": "carchi8py",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ },
+ {
+ "name": "user2",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ }
+ ],
+ "policies": [
+ {
+ "name": "my_policy",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ },
+ {
+ "name": "my_policy2",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ }
+ ],
+ "id": "5",
+ "svm": {
+ "name": "svm1",
+ "uuid": "e3cb5c7f-cd20"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None),
+ 'svm_uuid': (200, {"records": [
+ {
+ 'uuid': 'e3cb5c7f-cd20'
+ }], "num_records": 1}, None)
+})
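+
+# Keys used below that are not defined here (is_rest_97, is_rest_9_8_0, is_rest_9_10_1, empty_records,
+# empty_good, generic_error) are shared canned responses supplied by the rest_responses() helper.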
+
+USER = {
+ 'name': 'carchi8py'
+}
+
+POLICY = {
+ 'name': 'my_policy'
+}
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'carchi8py_group',
+ 'vserver': 'vserver',
+ 'users': [USER],
+ 'policies': [POLICY]
+}
+
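+# create_and_apply() and create_module() merge each test's module_args over DEFAULT_ARGS before running the
+# module, so a test only lists the options it overrides.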
+
+def test_low_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_97'])
+ ])
+ error = create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error: na_ontap_s3_groups only supports REST, and requires ONTAP 9.8.0 or later. Found: 9.7.0.'
+ assert msg in error
+
+
+def test_get_s3_groups_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_s3_groups() is None
+
+
+def test_get_s3_groups_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching S3 groups carchi8py_group: calling: protocols/s3/services/e3cb5c7f-cd20/groups: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_s3_groups, 'fail')['msg']
+
+
+def test_create_s3_group():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['empty_records']),
+ ('POST', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['empty_good'])
+ ])
+ module_args = {
+ 'comment': 'this is a s3 group',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_s3_group_with_multi_user_policies():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['empty_records']),
+ ('POST', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['empty_good'])
+ ])
+ module_args = {
+ 'comment': 'this is a s3 group',
+ 'users': [{'name': 'carchi8py'}, {'name': 'foo'}],
+ 'policies': [{'name': 'policy1'}, {'name': 'policy2'}]
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_s3_group_error_no_users():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['empty_records']),
+ ])
+ args = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'carchi8py_group',
+ 'vserver': 'vserver',
+ 'policies': [POLICY]
+ }
+ error = create_and_apply(my_module, args, {}, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'policies and users are required for a creating a group.' == error
+
+
+def test_create_s3_group_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('POST', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'this is a s3 group'
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ error = expect_and_capture_ansible_exception(my_obj.create_s3_groups, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating S3 groups carchi8py_group: calling: protocols/s3/services/e3cb5c7f-cd20/groups: got Expected error.' == error
+
+
+def test_delete_s3_group():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['s3_group']),
+ ('DELETE', 'protocols/s3/services/e3cb5c7f-cd20/groups/5', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_s3_group_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('DELETE', 'protocols/s3/services/e3cb5c7f-cd20/groups/5', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ my_obj.group_id = 5
+ error = expect_and_capture_ansible_exception(my_obj.delete_s3_groups, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting S3 group carchi8py_group: calling: protocols/s3/services/e3cb5c7f-cd20/groups/5: got Expected error.' == error
+
+
+def test_modify_s3_group():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['s3_group']),
+ ('PATCH', 'protocols/s3/services/e3cb5c7f-cd20/groups/5', SRR['empty_good'])
+ ])
+ module_args = {
+ 'comment': 'this is a modify comment',
+ 'users': [{'name': 'carchi8py'}, {'name': 'user2'}],
+ 'policies': [{'name': 'policy1'}, {'name': 'policy2'}]
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_s3_group_no_current_user_policy():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/groups', SRR['s3_group_no_user_policy']),
+ ('PATCH', 'protocols/s3/services/e3cb5c7f-cd20/groups/5', SRR['empty_good'])
+ ])
+ module_args = {
+ 'users': [{'name': 'carchi8py'}, {'name': 'user2'}],
+ 'policies': [{'name': 'policy1'}, {'name': 'policy2'}]
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_s3_group_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('PATCH', 'protocols/s3/services/e3cb5c7f-cd20/groups/5', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'this is a modified s3 service'
+ current = {'comment': 'this is a modified s3 service'}
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ my_obj.group_id = 5
+ error = expect_and_capture_ansible_exception(my_obj.modify_s3_groups, 'fail', current)['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying S3 group carchi8py_group: calling: protocols/s3/services/e3cb5c7f-cd20/groups/5: got Expected error.' == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_policies.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_policies.py
new file mode 100644
index 000000000..eacb4e8c1
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_policies.py
@@ -0,0 +1,220 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_s3_policies \
+ import NetAppOntapS3Policies as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 's3_policy': (200, {
+ "records": [
+ {
+ "statements": [
+ {
+ "sid": "FullAccessToBucket1",
+ "resources": [
+ "bucket1",
+ "bucket1/*"
+ ],
+ "index": 0,
+ "actions": [
+ "GetObject",
+ "PutObject",
+ "DeleteObject",
+ "ListBucket"
+ ],
+ "effect": "allow"
+ }
+ ],
+ "comment": "S3 policy.",
+ "name": "Policy1",
+ "svm": {
+ "name": "policy_name",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "read-only": True
+ }
+ ],
+ "num_records": 1
+ }, None),
+ 'svm_uuid': (200, {"records": [
+ {
+ 'uuid': 'e3cb5c7f-cd20'
+ }], "num_records": 1}, None)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'policy_name',
+ 'vserver': 'vserver'
+}
+
+STATEMENT = {
+ "sid": "FullAccessToUser1",
+ "resources": [
+ "bucket1",
+ "bucket1/*"
+ ],
+ "actions": [
+ "GetObject",
+ "PutObject",
+ "DeleteObject",
+ "ListBucket"
+ ],
+ "effect": "allow",
+}
+
+STATEMENT2 = {
+ "sid": "FullAccessToUser1",
+ "resources": [
+ "bucket1",
+ "bucket1/*",
+ "bucket2",
+ "bucket2/*"
+ ],
+ "actions": [
+ "GetObject",
+ "PutObject",
+ "DeleteObject",
+ "ListBucket"
+ ],
+ "effect": "allow",
+}
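+
+# STATEMENT2 matches STATEMENT except for the extra bucket2 resources; that difference is what drives the
+# modify path in test_modify_s3_policies below.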
+
+
+def test_low_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_97'])
+ ])
+ error = create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error: na_ontap_s3_policies only supports REST, and requires ONTAP 9.8.0 or later. Found: 9.7.0.'
+ assert msg in error
+
+
+def test_get_s3_policies_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/policies', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_s3_policies() is None
+
+
+def test_get_s3_policies_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/policies', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching S3 policy policy_name: calling: protocols/s3/services/e3cb5c7f-cd20/policies: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_s3_policies, 'fail')['msg']
+
+
+def test_create_s3_policies():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/policies', SRR['empty_records']),
+ ('POST', 'protocols/s3/services/e3cb5c7f-cd20/policies', SRR['empty_good'])
+ ])
+ module_args = {
+ 'comment': 'this is a s3 user',
+ 'statements': [STATEMENT]
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_s3_policies_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('POST', 'protocols/s3/services/e3cb5c7f-cd20/policies', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'this is a s3 policies'
+ my_obj.parameters['statements'] = [STATEMENT]
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ error = expect_and_capture_ansible_exception(my_obj.create_s3_policies, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating S3 policy policy_name: calling: protocols/s3/services/e3cb5c7f-cd20/policies: got Expected error.' == error
+
+
+def test_delete_s3_policies():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/policies', SRR['s3_policy']),
+ ('DELETE', 'protocols/s3/services/e3cb5c7f-cd20/policies/policy_name', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_s3_policies_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('DELETE', 'protocols/s3/services/e3cb5c7f-cd20/policies/policy_name', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ error = expect_and_capture_ansible_exception(my_obj.delete_s3_policies, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting S3 policy policy_name: calling: protocols/s3/services/e3cb5c7f-cd20/policies/policy_name: got Expected error.' == error
+
+
+def test_modify_s3_policies():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/policies', SRR['s3_policy']),
+ ('PATCH', 'protocols/s3/services/e3cb5c7f-cd20/policies/policy_name', SRR['empty_good'])
+ ])
+ module_args = {'comment': 'this is a modify comment', 'statements': [STATEMENT2]}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_s3_policies_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('PATCH', 'protocols/s3/services/e3cb5c7f-cd20/policies/policy_name', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'this is a modified s3 service'
+ my_obj.parameters['statements'] = [STATEMENT2]
+ current = {'comment': 'this is a modified s3 service', 'statements': [STATEMENT2]}
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ error = expect_and_capture_ansible_exception(my_obj.modify_s3_policies, 'fail', current)['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying S3 policy policy_name: calling: protocols/s3/services/e3cb5c7f-cd20/policies/policy_name: got Expected error.' == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_services.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_services.py
new file mode 100644
index 000000000..fce59093a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_services.py
@@ -0,0 +1,176 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_s3_services \
+ import NetAppOntapS3Services as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 's3_service': (200, {
+ "svm": {
+ "uuid": "08c8a385-b1ac-11ec-bd2e-005056b3b297",
+ "name": "ansibleSVM",
+ },
+ "name": "carchi-test",
+ "enabled": True,
+ "buckets": [
+ {
+ "name": "carchi-test-bucket2"
+ },
+ {
+ "name": "carchi-test-bucket"
+ }
+ ],
+ "users": [
+ {
+ "name": "root"
+ }
+ ],
+ "comment": "this is a s3 service",
+ "certificate": {
+ "name": "ansibleSVM_16E1C1284D889609",
+ },
+ }, None)
+})
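+
+# Note that 's3_service' is a bare object rather than a 'records' list; its svm uuid is the one expected in
+# the DELETE and PATCH URLs registered below.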
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'service1',
+ 'vserver': 'vserver'
+}
+
+
+def test_low_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_97'])
+ ])
+ error = create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error: na_ontap_s3_services only supports REST, and requires ONTAP 9.8.0 or later. Found: 9.7.0.'
+ assert msg in error
+
+
+def test_get_s3_service_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/services', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_s3_service() is None
+
+
+def test_get_s3_service_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/services', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching S3 service service1: calling: protocols/s3/services: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_s3_service, 'fail')['msg']
+
+
+def test_create_s3_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/services', SRR['empty_records']),
+ ('POST', 'protocols/s3/services', SRR['empty_good'])
+ ])
+ module_args = {
+ 'enabled': True,
+ 'comment': 'this is a s3 service',
+ 'certificate_name': 'cert1',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_s3_service_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('POST', 'protocols/s3/services', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['enabled'] = True
+ my_obj.parameters['comment'] = 'this is a s3 service'
+ my_obj.parameters['certificate_name'] = 'cert1'
+ error = expect_and_capture_ansible_exception(my_obj.create_s3_service, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating S3 service service1: calling: protocols/s3/services: got Expected error.' == error
+
+
+def test_delete_s3_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/services', SRR['s3_service']),
+ ('DELETE', 'protocols/s3/services/08c8a385-b1ac-11ec-bd2e-005056b3b297', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_s3_service_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('DELETE', 'protocols/s3/services/08c8a385-b1ac-11ec-bd2e-005056b3b297', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.svm_uuid = '08c8a385-b1ac-11ec-bd2e-005056b3b297'
+ error = expect_and_capture_ansible_exception(my_obj.delete_s3_service, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting S3 service service1: calling: protocols/s3/services/08c8a385-b1ac-11ec-bd2e-005056b3b297: got Expected error.' == error
+
+
+def test_modify_s3_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/s3/services', SRR['s3_service']),
+ ('PATCH', 'protocols/s3/services/08c8a385-b1ac-11ec-bd2e-005056b3b297', SRR['empty_good'])
+ ])
+ module_args = {'comment': 'this is a modified s3 service',
+ 'enabled': False,
+ 'certificate_name': 'cert2',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_s3_service_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('PATCH', 'protocols/s3/services/08c8a385-b1ac-11ec-bd2e-005056b3b297', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'this is a modified s3 service'
+ current = {'comment': 'this is a modified s3 service'}
+ my_obj.svm_uuid = '08c8a385-b1ac-11ec-bd2e-005056b3b297'
+ error = expect_and_capture_ansible_exception(my_obj.modify_s3_service, 'fail', current)['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying S3 service service1: calling: protocols/s3/services/08c8a385-b1ac-11ec-bd2e-005056b3b297: got Expected error.' == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_users.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_users.py
new file mode 100644
index 000000000..71850e510
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_s3_users.py
@@ -0,0 +1,194 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_s3_users \
+ import NetAppOntapS3Users as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 's3_user': (200, {
+ "records": [
+ {
+ "comment": "S3 user",
+ "name": "carchi8py",
+ "svm": {
+ "name": "svm1",
+ "uuid": "e3cb5c7f-cd20"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None),
+ 's3_user_created': (200, {
+ "records": [
+ {
+ 'access_key': 'random_access_key',
+ 'secret_key': 'random_secret_key'
+ }
+ ],
+ "num_records": 1
+ }, None),
+ 'svm_uuid': (200, {"records": [
+ {
+ 'uuid': 'e3cb5c7f-cd20'
+ }], "num_records": 1}, None)
+})
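+
+# 's3_user_created' carries the generated access_key/secret_key; test_create_s3_users verifies that the
+# module surfaces both values in its result.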
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'carchi8py',
+ 'vserver': 'vserver'
+}
+
+
+def test_low_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_97'])
+ ])
+ error = create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error: na_ontap_s3_users only supports REST, and requires ONTAP 9.8.0 or later. Found: 9.7.0.'
+ assert msg in error
+
+
+def test_get_s3_users_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_s3_user() is None
+
+
+def test_get_s3_users_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error fetching S3 user carchi8py: calling: protocols/s3/services/e3cb5c7f-cd20/users: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_s3_user, 'fail')['msg']
+
+
+def test_create_s3_users():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['empty_records']),
+ ('POST', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['s3_user_created'])
+ ])
+ module_args = {
+ 'comment': 'this is a s3 user',
+ }
+ result = create_and_apply(my_module, DEFAULT_ARGS, module_args)
+ assert result['changed']
+ assert result['secret_key'] == 'random_secret_key'
+ assert result['access_key'] == 'random_access_key'
+
+
+def test_create_s3_users_fail_randomly():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['empty_records']),
+ ('POST', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['empty_good'])
+ ])
+ module_args = {
+ 'comment': 'this is a s3 user',
+ }
+ error = create_and_apply(my_module, DEFAULT_ARGS, module_args, 'fail')['msg']
+ assert 'Error creating S3 user carchi8py' == error
+
+
+def test_create_s3_user_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('POST', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'this is a s3 user'
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ error = expect_and_capture_ansible_exception(my_obj.create_s3_user, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating S3 user carchi8py: calling: protocols/s3/services/e3cb5c7f-cd20/users: got Expected error.' == error
+
+
+def test_delete_s3_user():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['s3_user']),
+ ('DELETE', 'protocols/s3/services/e3cb5c7f-cd20/users/carchi8py', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_s3_user_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('DELETE', 'protocols/s3/services/e3cb5c7f-cd20/users/carchi8py', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['state'] = 'absent'
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ error = expect_and_capture_ansible_exception(my_obj.delete_s3_user, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting S3 user carchi8py: calling: protocols/s3/services/e3cb5c7f-cd20/users/carchi8py: got Expected error.' == error
+
+
+def test_modify_s3_user():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/s3/services/e3cb5c7f-cd20/users', SRR['s3_user']),
+ ('PATCH', 'protocols/s3/services/e3cb5c7f-cd20/users/carchi8py', SRR['empty_good'])
+ ])
+ module_args = {'comment': 'this is a modify comment'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_s3_user_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('PATCH', 'protocols/s3/services/e3cb5c7f-cd20/users/carchi8py', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['comment'] = 'this is a modified s3 service'
+ current = {'comment': 'this is a modified s3 service'}
+ my_obj.svm_uuid = 'e3cb5c7f-cd20'
+ error = expect_and_capture_ansible_exception(my_obj.modify_s3_user, 'fail', current)['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying S3 user carchi8py: calling: protocols/s3/services/e3cb5c7f-cd20/users/carchi8py: got Expected error.' == error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_certificates.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_certificates.py
new file mode 100644
index 000000000..866dd3a58
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_certificates.py
@@ -0,0 +1,509 @@
+# (c) 2019-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_security_certificates """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import copy
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_security_certificates \
+ import NetAppOntapSecurityCertificates as my_module, main as my_main # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'empty_records': (200, {'records': []}, None),
+ 'get_uuid': (200, {'records': [{'uuid': 'ansible'}]}, None),
+ 'get_multiple_records': (200, {'records': [{'uuid': 'ansible'}, {'uuid': 'second'}]}, None),
+ 'error_unexpected_name': (200, None, {'message': 'Unexpected argument "name".'}),
+ 'error_duplicate_entry': (200, None, {'message': 'duplicate entry', 'target': 'uuid'}),
+ 'error_some_error': (200, None, {'message': 'some error'}),
+}
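+
+# These tests patch send_request directly and feed responses through side_effect in call order; the
+# 'end_of_sequence' entry makes any unexpected extra request surface as an error.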
+
+NAME_ERROR = "Error calling API: security/certificates - ONTAP 9.6 and 9.7 do not support 'name'. Use 'common_name' and 'type' as a work-around."
+TYPE_ERROR = "Error calling API: security/certificates - When using 'common_name', 'type' is required."
+EXPECTED_ERROR = "Error calling API: security/certificates - Expected error"
+
+
+def set_default_args():
+ return dict({
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'name_for_certificate'
+ })
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ set_module_args({})
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_get_certificate_called(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['end_of_sequence']
+ ]
+ set_module_args(set_default_args())
+ my_obj = my_module()
+ assert my_obj.get_certificate() is not None
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_error(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ set_module_args(set_default_args())
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_main()
+ assert exc.value.args[0]['msg'] == EXPECTED_ERROR
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create_failed(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+        SRR['get_uuid'],  # validate data vserver exists.
+ SRR['empty_records'], # get certificate -> not found
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'type': 'client_ca',
+ 'vserver': 'abc',
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = 'Error creating or installing certificate: one or more of the following options are missing:'
+ assert exc.value.args[0]['msg'].startswith(msg)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_successful_create(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+        SRR['get_uuid'],  # validate data vserver exists.
+ SRR['empty_records'], # get certificate -> not found
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'type': 'client_ca',
+ 'vserver': 'abc',
+ 'common_name': 'cname'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_idempotent_create(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+        SRR['get_uuid'],  # validate data vserver exists.
+ SRR['get_uuid'], # get certificate -> found
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'type': 'client_ca',
+ 'vserver': 'abc',
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_create_duplicate_entry(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_records'], # get certificate -> not found
+ copy.deepcopy(SRR['error_duplicate_entry']), # code under test modifies error in place
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'type': 'client_ca',
+ 'common_name': 'cname'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ print('EXC', exc.value.args[0]['msg'])
+ for fragment in ('Error creating or installing certificate: {',
+ "'message': 'duplicate entry. Same certificate may already exist under a different name.'",
+ "'target': 'cluster'"):
+ assert fragment in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_successful_delete(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'], # get certificate -> found
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'state': 'absent',
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_idempotent_delete(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_records'], # get certificate -> not found
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'state': 'absent',
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_delete(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'], # get certificate -> found
+ SRR['error_some_error'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'state': 'absent',
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "Error deleting certificate: {'message': 'some error'}"
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_multiple_records(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_multiple_records'], # get certificate -> 2 records!
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'state': 'absent',
+ 'common_name': 'cname',
+ 'type': 'client_ca',
+ }
+ data.update(set_default_args())
+ data.pop('name')
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "Duplicate records with same common_name are preventing safe operations: {'records': [{'uuid': 'ansible'}, {'uuid': 'second'}]}"
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_successful_sign(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['get_uuid'], # get certificate -> found
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'vserver': 'abc',
+ 'signing_request': 'CSR',
+ 'expiry_time': 'et'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_sign(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['get_uuid'], # get certificate -> found
+ SRR['error_some_error'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'vserver': 'abc',
+ 'signing_request': 'CSR',
+ 'expiry_time': 'et'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "Error signing certificate: {'message': 'some error'}"
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_failed_sign_missing_ca(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['empty_records'], # get certificate -> not found
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'vserver': 'abc',
+ 'signing_request': 'CSR'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "signing certificate with name '%s' not found on svm: %s" % (data['name'], data['vserver'])
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_failed_sign_absent(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['get_uuid'], # get certificate -> found
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'vserver': 'abc',
+ 'signing_request': 'CSR',
+ 'state': 'absent'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "'signing_request' is not supported with 'state' set to 'absent'"
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_failed_on_name(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['error_unexpected_name'], # get certificate -> error
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'vserver': 'abc',
+ 'signing_request': 'CSR',
+ 'state': 'absent',
+ 'ignore_name_if_not_supported': False,
+ 'common_name': 'common_name',
+ 'type': 'root_ca'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['msg'] == NAME_ERROR
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_cannot_ignore_name_error_no_common_name(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['error_unexpected_name'], # get certificate -> error
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'vserver': 'abc',
+ 'signing_request': 'CSR',
+ 'state': 'absent',
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['msg'] == NAME_ERROR
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_cannot_ignore_name_error_no_type(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['error_unexpected_name'], # get certificate -> error
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'vserver': 'abc',
+ 'signing_request': 'CSR',
+ 'state': 'absent',
+ 'common_name': 'common_name'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['msg'] == TYPE_ERROR
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_ignore_name_error(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['error_unexpected_name'], # get certificate -> error
+ SRR['get_uuid'], # get certificate -> found
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'vserver': 'abc',
+ 'signing_request': 'CSR',
+ 'state': 'absent',
+ 'common_name': 'common_name',
+ 'type': 'root_ca'
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ msg = "'signing_request' is not supported with 'state' set to 'absent'"
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_successful_create_name_error(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_uuid'],
+ SRR['error_unexpected_name'], # get certificate -> error
+ SRR['empty_records'], # get certificate -> not found
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'common_name': 'cname',
+ 'type': 'client_ca',
+ 'vserver': 'abc',
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ print(mock_request.mock_calls)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_data_vserver_not_exist(mock_request):
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['empty_records'],
+ SRR['end_of_sequence']
+ ]
+ data = {
+ 'common_name': 'cname',
+ 'type': 'client_ca',
+ 'vserver': 'abc',
+ }
+ data.update(set_default_args())
+ set_module_args(data)
+ my_obj = my_module()
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.apply()
+ assert 'Error vserver abc does not exist or is not a data vserver.' in exc.value.args[0]['msg']
+
+
+def test_rest_negative_no_name_and_type():
+ data = {
+ 'common_name': 'cname',
+ # 'type': 'client_ca',
+ 'vserver': 'abc',
+ }
+ data.update(set_default_args())
+ data.pop('name')
+ set_module_args(data)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = "Error: 'name' or ('common_name' and 'type') are required parameters."
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_negative_ZAPI_only(mock_request):
+ mock_request.side_effect = [
+ SRR['is_zapi'],
+ SRR['end_of_sequence']
+ ]
+ set_module_args(set_default_args())
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj = my_module()
+ print(exc.value.args[0])
+ msg = "na_ontap_security_certificates only supports REST, and requires ONTAP 9.6 or later. - Unreachable"
+ assert msg == exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_config.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_config.py
new file mode 100644
index 000000000..1ffdfbc02
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_config.py
@@ -0,0 +1,254 @@
+# (c) 2021-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+''' unit tests for ONTAP Ansible module: na_ontap_security_config '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ call_main, create_module, create_and_apply, expect_and_capture_ansible_exception, AnsibleFailJson, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_security_config \
+ import NetAppOntapSecurityConfig as security_config_module, main as my_main # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'security_config_record': (200, {
+ "records": [{
+ "is_fips_enabled": False,
+ "supported_protocols": ['TLSv1.3', 'TLSv1.2', 'TLSv1.1'],
+ "supported_cipher_suites": 'TLS_RSA_WITH_AES_128_CCM_8'
+ }], "num_records": 1
+ }, None),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
+
+
+security_config_info = {
+ 'num-records': 1,
+ 'attributes': {
+ 'security-config-info': {
+ "interface": 'ssl',
+ "is-fips-enabled": False,
+ "supported-protocols": ['TLSv1.2', 'TLSv1.1'],
+ "supported-ciphers": 'ALL:!LOW:!aNULL:!EXP:!eNULL:!3DES:!DES:!RC4'
+ }
+ },
+}
+
+
+ZRR = zapi_responses({
+ 'security_config_info': build_zapi_response(security_config_info)
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'never',
+}
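+
+# The ZAPI tests below run with 'use_rest: never'; ARGS_REST further down repeats the scenarios against the
+# REST endpoints with 'use_rest: always'.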
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ security_config_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_error_get_security_config_info():
+ register_responses([
+ ('ZAPI', 'security-config-get', ZRR['error'])
+ ])
+ module_args = {
+ "name": 'ssl',
+ "is_fips_enabled": False,
+ "supported_protocols": ['TLSv1.2', 'TLSv1.1']
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error getting security config for interface"
+ assert msg in error
+
+
+def test_get_security_config_info():
+ register_responses([
+ ('security-config-get', ZRR['security_config_info'])
+ ])
+ security_obj = create_module(security_config_module, DEFAULT_ARGS)
+ result = security_obj.get_security_config()
+ assert result
+
+
+def test_modify_security_config_fips():
+ register_responses([
+ ('ZAPI', 'security-config-get', ZRR['security_config_info']),
+ ('ZAPI', 'security-config-modify', ZRR['success'])
+ ])
+ module_args = {
+ "is_fips_enabled": True,
+ "supported_protocols": ['TLSv1.3', 'TLSv1.2'],
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_security_config_fips():
+ register_responses([
+ ('ZAPI', 'security-config-get', ZRR['security_config_info']),
+ ('ZAPI', 'security-config-modify', ZRR['error'])
+ ])
+ module_args = {
+ "is_fips_enabled": True,
+ "supported_protocols": ['TLSv1.3', 'TLSv1.2'],
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert "Error modifying security config for interface" in error
+
+
+def test_error_security_config():
+ register_responses([
+ ])
+ module_args = {
+ "is_fips_enabled": True,
+ "supported_protocols": ['TLSv1.2', 'TLSv1.1', 'TLSv1'],
+ }
+ error = create_module(security_config_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'If fips is enabled then TLSv1 is not a supported protocol' in error
+
+
+def test_error_security_config_supported_ciphers():
+ register_responses([
+ ])
+ module_args = {
+ "is_fips_enabled": True,
+ "supported_ciphers": 'ALL:!LOW:!aNULL:!EXP:!eNULL:!3DES:!DES:!RC4',
+ }
+ error = create_module(security_config_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'If fips is enabled then supported ciphers should not be specified' in error
+
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always'
+}
+
+
+def test_rest_error_get():
+ '''Test error rest get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', '/security', SRR['generic_error']),
+ ])
+ module_args = {
+ "is_fips_enabled": False,
+ "supported_protocols": ['TLSv1.2', 'TLSv1.1']
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ assert "Error on getting security config: calling: /security: got Expected error." in error
+
+
+def test_rest_get_security_config():
+    '''Test rest get security config'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', '/security', SRR['security_config_record']),
+ ])
+ module_args = {
+ "is_fips_enabled": False,
+ "supported_protocols": ['TLSv1.2', 'TLSv1.1']
+ }
+ security_obj = create_module(security_config_module, ARGS_REST, module_args)
+ result = security_obj.get_security_config_rest()
+ assert result
+
+
+def test_rest_modify_security_config():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', '/security', SRR['security_config_record']),
+ ('PATCH', '/security', SRR['success']),
+ ])
+ module_args = {
+ "is_fips_enabled": False,
+ "supported_protocols": ['TLSv1.3', 'TLSv1.2', 'TLSv1.1'],
+ "supported_cipher_suites": 'TLS_RSA_WITH_AES_128_CCM'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_rest_error_security_config():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {
+ "is_fips_enabled": True,
+ "supported_protocols": ['TLSv1.2', 'TLSv1.1', 'TLSv1'],
+ "supported_cipher_suites": 'TLS_RSA_WITH_AES_128_CCM'
+ }
+ error = create_module(security_config_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'If fips is enabled then TLSv1 is not a supported protocol' in error
+
+
+def test_rest_error_security_config_protocol():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {
+ "is_fips_enabled": True,
+ "supported_protocols": ['TLSv1.2', 'TLSv1.1'],
+ "supported_cipher_suites": 'TLS_RSA_WITH_AES_128_CCM'
+ }
+ error = create_module(security_config_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'If fips is enabled then TLSv1.1 is not a supported protocol' in error
+
+
+def test_rest_error_modify_security_config():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', '/security', SRR['security_config_record']),
+ ('PATCH', '/security', SRR['generic_error']),
+ ])
+ module_args = {
+ "is_fips_enabled": True,
+ "supported_protocols": ['TLSv1.3', 'TLSv1.2'],
+ "supported_cipher_suites": 'TLS_RSA_WITH_AES_128_CCM'
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ assert "Error on modifying security config: calling: /security: got Expected error." in error
+
+
+def test_rest_modify_security_config_fips():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', '/security', SRR['security_config_record']),
+ ('PATCH', '/security', SRR['success']),
+ ])
+ module_args = {
+ "is_fips_enabled": True,
+ "supported_protocols": ['TLSv1.3', 'TLSv1.2'],
+ "supported_cipher_suites": 'TLS_RSA_WITH_AES_128_CCM'
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_ca_certificate.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_ca_certificate.py
new file mode 100644
index 000000000..3728619eb
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_ca_certificate.py
@@ -0,0 +1,140 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, \
+ create_and_apply, create_module, expect_and_capture_ansible_exception, call_main, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, \
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_security_ipsec_ca_certificate \
+ import NetAppOntapSecurityCACertificate as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'cert1',
+ 'use_rest': 'always'
+}
+
+
+SRR = rest_responses({
+ 'ipsec_ca_svm_scope': (200, {"records": [{
+ 'name': 'cert1',
+ 'svm': {'name': 'svm4'},
+ 'uuid': '380a12f7'
+ }], "num_records": 1}, None),
+ 'ipsec_ca_cluster_scope': (200, {"records": [{
+ 'name': 'cert2',
+ 'scope': 'cluster',
+ 'uuid': '878eaa35'}], "num_records": 1}, None),
+ 'error_ipsec_ca_not_exist': (404, None, {'code': 4, 'message': "entry doesn't exist"}),
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "name"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_create_security_ipsec_ca_certificate_svm():
+ ''' create ipsec ca certificates in svm '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['ipsec_ca_svm_scope']), # get certificate uuid.
+ ('GET', 'security/ipsec/ca-certificates/380a12f7', SRR['error_ipsec_ca_not_exist']), # ipsec ca does not exist.
+ ('POST', 'security/ipsec/ca-certificates', SRR['success']), # create.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['ipsec_ca_svm_scope']), # get certificate uuid.
+        ('GET', 'security/ipsec/ca-certificates/380a12f7', SRR['ipsec_ca_svm_scope']),  # ipsec ca already exists.
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'svm': 'svm4'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'svm': 'svm4'})['changed']
+
+
+def test_create_security_ipsec_ca_certificate_cluster():
+ ''' create ipsec ca certificates in cluster '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['ipsec_ca_cluster_scope']),
+ ('GET', 'security/ipsec/ca-certificates/878eaa35', SRR['error_ipsec_ca_not_exist']),
+ ('POST', 'security/ipsec/ca-certificates', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['ipsec_ca_cluster_scope']),
+ ('GET', 'security/ipsec/ca-certificates/878eaa35', SRR['ipsec_ca_cluster_scope'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'name': 'cert1'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'name': 'cert1'})['changed']
+
+
+def test_error_certificate_not_exist():
+ ''' error if certificate not present '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['empty_records']),
+        # do not throw an error if the certificate does not exist and state is absent.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['empty_records'])
+ ])
+ error = "Error: certificate cert1 is not installed"
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, fail=True)['msg']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_delete_security_ipsec_ca_certificate():
+ ''' test delete ipsec ca certificate '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['ipsec_ca_cluster_scope']),
+ ('GET', 'security/ipsec/ca-certificates/878eaa35', SRR['ipsec_ca_cluster_scope']),
+ ('DELETE', 'security/ipsec/ca-certificates/878eaa35', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['ipsec_ca_cluster_scope']),
+ ('GET', 'security/ipsec/ca-certificates/878eaa35', SRR['empty_records'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_all_methods_catch_exception():
+ ''' test exception in get/create/modify/delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ # GET/POST/DELETE error.
+ ('GET', 'security/certificates', SRR['generic_error']),
+ ('GET', 'security/certificates', SRR['ipsec_ca_cluster_scope']),
+ ('GET', 'security/ipsec/ca-certificates/878eaa35', SRR['generic_error']),
+ ('POST', 'security/ipsec/ca-certificates', SRR['generic_error']),
+ ('DELETE', 'security/ipsec/ca-certificates/878eaa35', SRR['generic_error'])
+ ])
+ ca_obj = create_module(my_module, DEFAULT_ARGS)
+ assert 'Error fetching uuid for certificate' in expect_and_capture_ansible_exception(ca_obj.get_certificate_uuid, 'fail')['msg']
+ assert 'Error fetching security IPsec CA certificate' in expect_and_capture_ansible_exception(ca_obj.get_ipsec_ca_certificate, 'fail')['msg']
+ assert 'Error adding security IPsec CA certificate' in expect_and_capture_ansible_exception(ca_obj.create_ipsec_ca_certificate, 'fail')['msg']
+ assert 'Error deleting security IPsec CA certificate' in expect_and_capture_ansible_exception(ca_obj.delete_ipsec_ca_certificate, 'fail')['msg']
+
+
+def test_error_ontap9_9_1():
+ ''' test module supported from 9.10.1 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1'])
+ ])
+ assert 'requires ONTAP 9.10.1 or later' in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_config.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_config.py
new file mode 100644
index 000000000..e4f7d2527
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_config.py
@@ -0,0 +1,87 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, \
+ create_and_apply, create_module, expect_and_capture_ansible_exception, call_main, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, \
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_security_ipsec_config \
+ import NetAppOntapSecurityIPsecConfig as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+}
+
+
+SRR = rest_responses({
+ 'ipsec_config': (200, {"records": [{"enabled": True, "replay_window": "64"}]}, None),
+ 'ipsec_config_1': (200, {"records": [{"enabled": False, "replay_window": "0"}]}, None)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_modify_security_ipsec_config():
+    ''' modify security ipsec config '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec', SRR['ipsec_config_1']),
+ ('PATCH', 'security/ipsec', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec', SRR['ipsec_config']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec', SRR['empty_records']),
+ ])
+ args = {
+ "enabled": True,
+ "replay_window": 64
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_all_methods_catch_exception():
+ ''' test exception in get/create/modify/delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # GET/PATCH error.
+ ('GET', 'security/ipsec', SRR['generic_error']),
+ ('PATCH', 'security/ipsec', SRR['generic_error'])
+ ])
+ sec_obj = create_module(my_module, DEFAULT_ARGS)
+ assert 'Error fetching security IPsec config' in expect_and_capture_ansible_exception(sec_obj.get_security_ipsec_config, 'fail')['msg']
+ assert 'Error modifying security IPsec config' in expect_and_capture_ansible_exception(sec_obj.modify_security_ipsec_config, 'fail', {})['msg']
+
+
+def test_error_ontap97():
+ ''' test module supported from 9.8 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97'])
+ ])
+ assert 'requires ONTAP 9.8.0 or later' in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_policy.py
new file mode 100644
index 000000000..b913ac03e
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ipsec_policy.py
@@ -0,0 +1,268 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, \
+ create_and_apply, create_module, expect_and_capture_ansible_exception, call_main, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, \
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_security_ipsec_policy \
+ import NetAppOntapSecurityIPsecPolicy as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'ipsec_policy',
+ 'use_rest': 'always',
+ 'local_endpoint': {
+ 'address': '10.23.43.23',
+ 'netmask': '24',
+ 'port': '201'
+ },
+ 'remote_endpoint': {
+ 'address': '10.23.43.13',
+ 'netmask': '24'
+ },
+ 'protocol': 'tcp'
+}
+
+
+def form_rest_response(args=None):
+ response = {
+ "uuid": "6c025f9b",
+ "name": "ipsec1",
+ "scope": "svm",
+ "svm": {"name": "ansibleSVM"},
+ "local_endpoint": {
+ "address": "10.23.43.23",
+ "netmask": "24",
+ "port": "201-201"
+ },
+ "remote_endpoint": {
+ "address": "10.23.43.13",
+ "netmask": "24",
+ "port": "0-0"
+ },
+ "protocol": "tcp",
+ "local_identity": "ing",
+ "remote_identity": "ing",
+ "action": "discard",
+ "enabled": False,
+ "authentication_method": "none"
+ }
+ if args:
+ response.update(args)
+ return response
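+# form_rest_response returns the baseline IPsec policy record, optionally overriding a few
+# fields, e.g. form_rest_response({'protocol': 'udp'}) only swaps the protocol value; this
+# keeps the canned records below short.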
+
+
+SRR = rest_responses({
+ 'ipsec_auth_none': (200, {"records": [form_rest_response()], "num_records": 1}, None),
+ 'ipsec_auth_psk': (200, {"records": [form_rest_response({
+ "action": "esp_transport",
+ "authentication_method": "psk"
+ })], "num_records": 1}, None),
+ 'ipsec_auth_pki': (200, {"records": [form_rest_response({
+ "action": "esp_transport",
+ "authentication_method": "pki",
+ "certificate": {"name": "ca_cert"}
+ })], "num_records": 1}, None),
+ 'ipsec_modify': (200, {"records": [form_rest_response({
+ "local_endpoint": {"address": "10.23.43.24", "netmask": "24"},
+ "remote_endpoint": {"address": "10.23.43.14", "netmask": "24", "port": "200-200"},
+ "protocol": "udp",
+ })], "num_records": 1}, None),
+ 'ipsec_ipv6': (200, {"records": [form_rest_response({
+ "local_endpoint": {"address": "2402:940::45", "netmask": "64", "port": "120-120"},
+ "remote_endpoint": {"address": "2402:940::55", "netmask": "64", "port": "200-200"},
+ "protocol": "udp",
+ })], "num_records": 1}, None),
+ 'ipsec_ipv6_modify': (200, {"records": [form_rest_response({
+ "local_endpoint": {"address": "2402:940::46", "netmask": "64", "port": "120-120"},
+ "remote_endpoint": {"address": "2402:940::56", "netmask": "64", "port": "200-200"},
+ "protocol": "udp",
+ })], "num_records": 1}, None)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "name"]
+ error = create_module(my_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_create_security_ipsec_policy_certificate():
+ ''' create ipsec policy with certificates '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ipsec/policies', SRR['empty_records']),
+ ('POST', 'security/ipsec/policies', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_auth_pki']),
+ ])
+ args = {
+ "action": "esp_transport",
+ "authentication_method": "pki",
+ "certificate": "ca_cert"
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_create_security_ipsec_policy_psk():
+ ''' create ipsec policy with pre-shared keys '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ipsec/policies', SRR['empty_records']),
+ ('POST', 'security/ipsec/policies', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_auth_psk']),
+ ])
+ args = {
+ "action": "esp_transport",
+ "authentication_method": "psk",
+ "secret_key": "QDFRTGJUOJDE4RFGDSDW"
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_create_security_ipsec_policy():
+ ''' create ipsec policy without authentication method in 9.8 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec/policies', SRR['empty_records']),
+ ('POST', 'security/ipsec/policies', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_auth_none']),
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS)['changed']
+
+
+def test_modify_security_ipsec_policy():
+ ''' modify ipsec policy '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_auth_none']),
+ ('PATCH', 'security/ipsec/policies/6c025f9b', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_modify'])
+ ])
+ args = {
+ "local_endpoint": {"address": "10.23.43.24", "netmask": "255.255.255.0"},
+ "remote_endpoint": {"address": "10.23.43.14", "netmask": "255.255.255.0", "port": "200"},
+ "protocol": "udp"
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_warnings_raised():
+ ''' test warnings raised '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ args = {"certificate": "new_Cert", "authentication_method": "pki", "action": "discard"}
+ create_module(my_module, DEFAULT_ARGS, args)
+ warning = "The IPsec action is discard"
+ print_warnings()
+ assert_warning_was_raised(warning, partial_match=True)
+
+ args = {"secret_key": "AEDFGJTUSHNFGKGLFD", "authentication_method": "psk", "action": "bypass"}
+ create_module(my_module, DEFAULT_ARGS, args)
+ warning = "The IPsec action is bypass"
+ print_warnings()
+ assert_warning_was_raised(warning, partial_match=True)
+
+
+def test_modify_security_ipsec_policy_ipv6():
+ ''' test modify ipv6 address '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_ipv6']),
+ ('PATCH', 'security/ipsec/policies/6c025f9b', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_ipv6_modify'])
+ ])
+ args = {
+ "local_endpoint": {"address": "2402:0940:000:000:00:00:0000:0046", "netmask": "64"},
+ "remote_endpoint": {"address": "2402:0940:000:000:00:00:0000:0056", "netmask": "64", "port": "200"},
+ "protocol": "17",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_security_ipsec_policy():
+ ''' test delete ipsec policy '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_auth_none']),
+ ('DELETE', 'security/ipsec/policies/6c025f9b', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/ipsec/policies', SRR['empty_records'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_all_methods_catch_exception():
+ ''' test exception in get/create/modify/delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # GET/POST/PATCH/DELETE error.
+ ('GET', 'security/ipsec/policies', SRR['generic_error']),
+ ('POST', 'security/ipsec/policies', SRR['generic_error']),
+ ('PATCH', 'security/ipsec/policies/6c025f9b', SRR['generic_error']),
+ ('DELETE', 'security/ipsec/policies/6c025f9b', SRR['generic_error'])
+ ])
+ sec_obj = create_module(my_module, DEFAULT_ARGS)
+ sec_obj.uuid = '6c025f9b'
+ assert 'Error fetching security ipsec policy' in expect_and_capture_ansible_exception(sec_obj.get_security_ipsec_policy, 'fail')['msg']
+ assert 'Error creating security ipsec policy' in expect_and_capture_ansible_exception(sec_obj.create_security_ipsec_policy, 'fail')['msg']
+ assert 'Error modifying security ipsec policy' in expect_and_capture_ansible_exception(sec_obj.modify_security_ipsec_policy, 'fail', {})['msg']
+ assert 'Error deleting security ipsec policy' in expect_and_capture_ansible_exception(sec_obj.delete_security_ipsec_policy, 'fail')['msg']
+
+
+def test_modify_error():
+ ''' test modify error '''
+ register_responses([
+        # Error when trying to modify certificate while auth_method is none.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_auth_none']),
+        # Error when trying to modify action and authentication_method.
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ipsec/policies', SRR['ipsec_auth_none'])
+    ])
+ args = {'certificate': 'cert_new'}
+ assert 'Error: cannot set certificate for IPsec policy' in create_and_apply(my_module, DEFAULT_ARGS, args, fail=True)['msg']
+ args = {'authentication_method': 'psk', 'action': 'esp_udp', 'secret_key': 'secretkey'}
+ assert 'Error: cannot modify options' in create_and_apply(my_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_error_ontap97():
+ ''' test module supported from 9.8 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97'])
+ ])
+ assert 'requires ONTAP 9.8.0 or later' in call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_key_manager.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_key_manager.py
new file mode 100644
index 000000000..38d18981f
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_key_manager.py
@@ -0,0 +1,804 @@
+# (c) 2019, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_security_key_manager '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_warning_was_raised, call_main, create_module, expect_and_capture_ansible_exception, patch_ansible, print_warnings
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_security_key_manager import\
+ NetAppOntapSecurityKeyManager as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+security_key_info = {
+ 'attributes-list': {
+ 'key-manager-info': {
+ 'key-manager-ip-address': '0.1.2.3',
+ 'key-manager-server-status': 'available',
+ 'key-manager-tcp-port': '5696',
+ 'node-name': 'test_node'
+ }
+ }
+}
+
+ZRR = zapi_responses({
+ 'security_key_info': build_zapi_response(security_key_info, 1)
+})
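+# The second argument to build_zapi_response is presumably the num-records count injected
+# into the ZAPI reply, so 'security_key_info' reads as one existing key manager while the
+# framework's default 'no_records' entry models an empty result.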
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never'
+ }
+ error = 'missing required arguments:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_get_nonexistent_key_manager():
+ ''' Test if get_key_manager() returns None for non-existent key manager '''
+ register_responses([
+ ('ZAPI', 'security-key-manager-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'ip_address': '1.2.3.4',
+ 'use_rest': 'never'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ result = my_obj.get_key_manager()
+ assert result is None
+
+
+def test_get_existing_key_manager():
+ ''' Test if get_key_manager() returns details for existing key manager '''
+ register_responses([
+ ('ZAPI', 'security-key-manager-get-iter', ZRR['security_key_info']),
+ ])
+ module_args = {
+ 'ip_address': '1.2.3.4',
+ 'use_rest': 'never'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ result = my_obj.get_key_manager()
+ assert result['ip_address'] == '0.1.2.3'
+
+
+def test_successfully_add_key_manager():
+ ''' Test successfully add key manager'''
+ register_responses([
+ ('ZAPI', 'security-key-manager-setup', ZRR['success']),
+ ('ZAPI', 'security-key-manager-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-key-manager-add', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'security-key-manager-setup', ZRR['success']),
+ ('ZAPI', 'security-key-manager-get-iter', ZRR['security_key_info']),
+ ])
+ module_args = {
+ 'ip_address': '0.1.2.3',
+ 'use_rest': 'never'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_key_manager():
+    ''' Test error when modifying an existing key manager (not supported with ZAPI)'''
+ register_responses([
+ ('ZAPI', 'security-key-manager-setup', ZRR['success']),
+ ('ZAPI', 'security-key-manager-get-iter', ZRR['security_key_info']),
+ ])
+ module_args = {
+ 'ip_address': '1.2.3.4',
+ 'use_rest': 'never'
+ }
+ error = "Error, cannot modify existing configuraton: modify is not supported with ZAPI, new values: {'ip_address': '1.2.3.4'}, current values:"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successfully_delete_key_manager():
+ ''' Test successfully delete key manager'''
+ register_responses([
+ ('ZAPI', 'security-key-manager-setup', ZRR['success']),
+ ('ZAPI', 'security-key-manager-get-iter', ZRR['security_key_info']),
+ ('ZAPI', 'security-key-manager-delete', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'security-key-manager-setup', ZRR['success']),
+ ('ZAPI', 'security-key-manager-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'ip_address': '1.2.3.4',
+ 'state': 'absent',
+ 'use_rest': 'never',
+ 'node': 'some_node'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised('The option "node" is deprecated and should not be used.')
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ module_args = {
+ 'ip_address': '1.2.3.4',
+ 'use_rest': 'never',
+ 'node': 'some_node'
+ }
+ error = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print_warnings()
+ assert_warning_was_raised('The option "node" is deprecated and should not be used.')
+
+
+def test_error_handling():
+ ''' test error handling on ZAPI calls '''
+ register_responses([
+ ('ZAPI', 'security-key-manager-setup', ZRR['error']),
+ ('ZAPI', 'security-key-manager-get-iter', ZRR['error']),
+ ('ZAPI', 'security-key-manager-add', ZRR['error']),
+ ('ZAPI', 'security-key-manager-delete', ZRR['error']),
+    ])
+ module_args = {
+ 'ip_address': '1.2.3.4',
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = zapi_error_message('Error setting up key manager')
+ assert error in expect_and_capture_ansible_exception(my_obj.key_manager_setup, 'fail')['msg']
+ error = zapi_error_message('Error fetching key manager')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_key_manager, 'fail')['msg']
+ error = zapi_error_message('Error creating key manager')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_key_manager, 'fail')['msg']
+ error = zapi_error_message('Error deleting key manager')
+ assert error in expect_and_capture_ansible_exception(my_obj.delete_key_manager, 'fail')['msg']
+
+
+def test_rest_is_required():
+ '''report error if external or onboard are used with ZAPI'''
+ register_responses([
+ ])
+ module_args = {
+ 'onboard': {
+ 'synchronize': True
+ },
+ 'use_rest': 'never',
+ }
+ error = 'Error: REST is required for onboard option.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args = {
+ 'external': {
+ 'servers': ['0.1.2.3:5696'],
+ 'client_certificate': 'client_certificate',
+ 'server_ca_certificates': ['server_ca_certificate']
+ },
+ 'use_rest': 'never',
+ 'vserver': 'svm_name',
+ }
+ error = 'options.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'one_external_seckey_record': (200, {
+ 'records': [{
+ 'uuid': 'a1b2c3',
+ 'external': {
+ 'servers': [{'server': '0.1.2.3:5696'}]
+ }}],
+ 'num_records': 1
+ }, None),
+ 'one_external_seckey_record_2_servers': (200, {
+ 'records': [{
+ 'uuid': 'a1b2c3',
+ 'external': {
+ 'servers': [
+ {'server': '1.2.3.4:5696'},
+ {'server': '0.1.2.3:5696'}]
+ },
+ 'onboard': {'enabled': False}}],
+ 'num_records': 1
+ }, None),
+ 'one_onboard_seckey_record': (200, {
+ 'records': [{
+ 'uuid': 'a1b2c3',
+ 'onboard': {
+ 'enabled': True,
+ 'key_backup': "certificate",
+ }}],
+ 'num_records': 1
+ }, None),
+ 'one_security_certificate_record': (200, {
+ 'records': [{'uuid': 'a1b2c3'}],
+ 'num_records': 1
+ }, None),
+ 'error_duplicate': (400, None, {'message': 'New passphrase cannot be same as the old passphrase.'}),
+ 'error_incorrect': (400, None, {'message': 'Cluster-wide passphrase is incorrect.'}),
+ 'error_svm_not_found': (400, None, {'message': 'SVM "svm_name" does not exist'}),
+ 'error_already_present': (400, None, {'message': 'already has external key management configured'}),
+}, False)
+
+
+def test_successfully_add_key_manager_old_style_rest():
+    ''' Test successfully add key manager using ip_address (old style) with REST'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['zero_records']),
+ ('POST', 'security/key-managers', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_external_seckey_record']),
+ ])
+ module_args = {
+ 'ip_address': '0.1.2.3',
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_add_key_manager_external_rest():
+    ''' Test successfully add external key manager'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ('POST', 'security/key-managers', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_external_seckey_record']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ])
+ module_args = {
+ 'external': {
+ 'servers': ['0.1.2.3:5696'],
+ 'client_certificate': 'client_certificate',
+ 'server_ca_certificates': ['server_ca_certificate']
+ },
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_add_key_manager_external_rest_svm():
+    ''' Test successfully add external key manager at SVM scope'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ('POST', 'security/key-managers', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_external_seckey_record']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ])
+ module_args = {
+ 'external': {
+ 'servers': ['0.1.2.3:5696'],
+ 'client_certificate': 'client_certificate',
+ 'server_ca_certificates': ['server_ca_certificate']
+ },
+ 'vserver': 'svm_name',
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_add_key_manager_onboard_rest():
+    ''' Test successfully add onboard key manager'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['zero_records']),
+ ('POST', 'security/key-managers', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_duplicate']),
+ ])
+ module_args = {
+ 'onboard': {
+ 'passphrase': 'passphrase_too_short',
+ 'from_passphrase': 'ignored on create',
+ },
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_add_key_manager_onboard_svm_rest():
+    ''' Test error when adding onboard key manager at SVM scope'''
+ register_responses([
+ ])
+ module_args = {
+ 'onboard': {
+ 'passphrase': 'passphrase_too_short',
+ 'from_passphrase': 'ignored on create',
+ },
+ 'vserver': 'svm_name',
+ 'use_rest': 'always'
+ }
+ error = 'parameters are mutually exclusive:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successfully_delete_key_manager_rest():
+    ''' Test successfully delete key manager'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('DELETE', 'security/key-managers/a1b2c3', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['zero_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_change_passphrase_onboard_key_manager_rest():
+    ''' Test successfully change passphrase for onboard key manager'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_incorrect']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_duplicate']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ # both passphrases are incorrect
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_incorrect']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_incorrect']),
+ # unexpected success on check passphrase
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_duplicate']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ # unexpected success on check passphrase
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_incorrect']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ # unexpected success on check passphrase
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ # unexpected error on check passphrase
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['generic_error']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_duplicate']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ # unexpected error on check passphrase
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_incorrect']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['generic_error']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ # unexpected error on check passphrase
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['generic_error']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['generic_error']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_duplicate']),
+ ])
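+    # Each scenario registers three PATCH responses: the module appears to probe the
+    # installed passphrase first (an 'error_duplicate' or unexpected success means the
+    # probed passphrase already matches), and only the last PATCH applies the change;
+    # 'error_incorrect' on both probes is reported as a failure below.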
+ module_args = {
+ 'onboard': {
+ 'passphrase': 'passphrase_too_short',
+ 'from_passphrase': 'passphrase_too_short'
+ },
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ error = rest_error_message('Error: neither from_passphrase nor passphrase match installed passphrase',
+ 'security/key-managers/a1b2c3',
+ got="got {'message': 'Cluster-wide passphrase is incorrect.'}.")
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ # success
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # ignored error
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # idempotency
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_change_passphrase_and_sync_onboard_key_manager_rest():
+ ''' Test successfully modify onboard key manager'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_incorrect']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_duplicate']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ # idempotency - sync is always sent!
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['error_duplicate']),
+ ('PATCH', 'security/key-managers/a1b2c3', SRR['success']),
+ ])
+ module_args = {
+ 'onboard': {
+ 'passphrase': 'passphrase_too_short',
+ 'from_passphrase': 'passphrase_too_short',
+ 'synchronize': True
+ },
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # idempotency
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_change_external_key_manager_rest():
+    ''' Test successfully change external key manager servers'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_external_seckey_record_2_servers']),
+ ('DELETE', 'security/key-managers/a1b2c3/key-servers/1.2.3.4:5696', SRR['success']),
+ ('DELETE', 'security/key-managers/a1b2c3/key-servers/0.1.2.3:5696', SRR['success']),
+ ('POST', 'security/key-managers/a1b2c3/key-servers', SRR['success']),
+ ('POST', 'security/key-managers/a1b2c3/key-servers', SRR['success']),
+ # same servers but different order
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_external_seckey_record_2_servers']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_external_seckey_record_2_servers']),
+ ])
+ module_args = {
+ 'external': {
+ 'servers': ['0.1.2.3:5697', '1.2.3.4:5697']
+ },
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # same servers but different order
+ module_args = {
+ 'external': {
+ 'servers': ['0.1.2.3:5696', '1.2.3.4:5696']
+ },
+ 'use_rest': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # idempotency
+ module_args = {
+ 'external': {
+ 'servers': ['1.2.3.4:5696', '0.1.2.3:5696']
+ },
+ 'use_rest': 'always'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_external_key_manager_rest():
+    ''' Test REST error handling for key manager calls'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['generic_error']),
+ ('GET', 'security/certificates', SRR['generic_error']),
+ ('POST', 'security/key-managers', SRR['generic_error']),
+ ('PATCH', 'security/key-managers/123', SRR['generic_error']),
+ ('DELETE', 'security/key-managers/123', SRR['generic_error']),
+ ('POST', 'security/key-managers/123/key-servers', SRR['generic_error']),
+ ('DELETE', 'security/key-managers/123/key-servers/server_name', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = rest_error_message('Error fetching key manager info for cluster', 'security/key-managers')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_key_manager, 'fail')['msg']
+ error = rest_error_message('Error fetching security certificate info for name of type: type on cluster', 'security/certificates')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_security_certificate_uuid_rest, 'fail', 'name', 'type')['msg']
+ error = rest_error_message('Error creating key manager for cluster', 'security/key-managers')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_key_manager_rest, 'fail')['msg']
+ my_obj.uuid = '123'
+ error = rest_error_message('Error modifying key manager for cluster', 'security/key-managers/123')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_key_manager_rest, 'fail', {'onboard': {'xxxx': 'yyyy'}})['msg']
+ error = rest_error_message('Error deleting key manager for cluster', 'security/key-managers/123')
+ assert error in expect_and_capture_ansible_exception(my_obj.delete_key_manager_rest, 'fail')['msg']
+ error = rest_error_message('Error adding external key server server_name', 'security/key-managers/123/key-servers')
+ assert error in expect_and_capture_ansible_exception(my_obj.add_external_server_rest, 'fail', 'server_name')['msg']
+ error = rest_error_message('Error removing external key server server_name', 'security/key-managers/123/key-servers/server_name')
+ assert error in expect_and_capture_ansible_exception(my_obj.remove_external_server_rest, 'fail', 'server_name')['msg']
+
+
+def test_get_security_certificate_uuid_rest_by_name_then_common_name():
+ ''' Use name first, then common_name'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ # not found
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ # with 9.7 or earlier, name is not supported
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_security_certificate_uuid_rest('name', 'type') is not None
+ assert_warning_was_raised('certificate name not found, retrying with common_name and type type.')
+ # not found, neither with name nor common_name
+ error = 'Error fetching security certificate info for name of type: type on cluster: not found.'
+ assert error in expect_and_capture_ansible_exception(my_obj.get_security_certificate_uuid_rest, 'fail', 'name', 'type')['msg']
+ # 9.7
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_security_certificate_uuid_rest('name', 'type') is not None
+ assert_warning_was_raised('name is not supported in 9.6 or 9.7, using common_name name and type type.')
+
+
+def test_get_security_certificate_uuid_rest_by_name_then_common_name_svm():
+ ''' With SVM, retry at cluster scope if not found or error at SVM scope '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ # not found
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['generic_error']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ # with 9.7 or earlier, name is not supported
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'vserver': 'svm_name'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_security_certificate_uuid_rest('name', 'type') is not None
+ assert_warning_was_raised('certificate name not found, retrying with common_name and type type.')
+ # not found, neither with name nor common_name
+ error = 'Error fetching security certificate info for name of type: type on vserver: svm_name: not found.'
+ assert error in expect_and_capture_ansible_exception(my_obj.get_security_certificate_uuid_rest, 'fail', 'name', 'type')['msg']
+ # 9.7
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_security_certificate_uuid_rest('name', 'type') is not None
+ assert_warning_was_raised('name is not supported in 9.6 or 9.7, using common_name name and type type.')
+
+
+def test_warn_when_onboard_exists_and_only_one_passphrase_present():
+ ''' Warn if only one passphrase is present '''
+ register_responses([
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ])
+ module_args = {
+ 'onboard': {
+ 'passphrase': 'passphrase_too_short',
+ },
+ 'use_rest': 'always'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_warning_was_raised('passphrase is ignored')
+ module_args = {
+ 'onboard': {
+ 'from_passphrase': 'passphrase_too_short',
+ },
+ 'use_rest': 'always'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert_warning_was_raised('from_passphrase is ignored')
+
+
+def test_error_cannot_change_key_manager_type_rest():
+    ''' Test error when switching between onboard and external key managers'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_external_seckey_record']),
+ ])
+ module_args = {
+ 'external': {
+ 'servers': ['0.1.2.3:5697', '1.2.3.4:5697']
+ },
+ 'use_rest': 'always'
+ }
+ error = 'Error, cannot modify existing configuraton: onboard key-manager is already installed, it needs to be deleted first.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args = {
+ 'onboard': {
+ 'from_passphrase': 'passphrase_too_short',
+ },
+ 'use_rest': 'always'
+ }
+ error = 'Error, cannot modify existing configuraton: external key-manager is already installed, it needs to be deleted first.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_error_sync_requires_passphrase_rest():
+    ''' Test error when synchronize is requested without a passphrase '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['one_onboard_seckey_record']),
+ ])
+ module_args = {
+ 'onboard': {
+ 'synchronize': True
+ },
+ 'use_rest': 'always'
+ }
+ error = 'Error: passphrase is required for synchronize.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_return_not_present_when_svm_not_found_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['error_svm_not_found']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'svm_name',
+ 'use_rest': 'always'
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_retry_on_create_error(dont_sleep):
+ """ when no key server is present, REST does not return a record """
+ ''' Test successfully add key manager'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/key-managers', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ('GET', 'security/certificates', SRR['one_security_certificate_record']),
+ ('POST', 'security/key-managers', SRR['error_already_present']),
+ ('DELETE', 'security/key-managers', SRR['success']),
+ # we only retry once, erroring out
+ ('POST', 'security/key-managers', SRR['error_already_present']),
+    ])
+ module_args = {
+ 'external': {
+ 'servers': ['0.1.2.3:5696'],
+ 'client_certificate': 'client_certificate',
+ 'server_ca_certificates': ['server_ca_certificate']
+ },
+ 'vserver': 'svm_name',
+ 'use_rest': 'always'
+ }
+ error = 'Error creating key manager for cluster:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_update_key_server_list():
+ ''' Validate servers are added/removed '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # add/remove
+ ('DELETE', 'security/key-managers/123/key-servers/s1', SRR['success']),
+ ('DELETE', 'security/key-managers/123/key-servers/s3', SRR['success']),
+ ('POST', 'security/key-managers/123/key-servers', SRR['success']),
+ ('POST', 'security/key-managers/123/key-servers', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ # no requested change
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ current = {
+ 'external': {
+ 'servers': [
+ {'server': 's1'},
+ {'server': 's2'},
+ {'server': 's3'},
+ ]
+ }
+ }
+    # no 'external' option in parameters, nothing to update
+ assert my_obj.update_key_server_list(current) is None
+ my_obj.parameters['external'] = {
+ 'servers': [
+ {'server': 's1'},
+ {'server': 's2'},
+ {'server': 's3'},
+ ]
+ }
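+    # requested servers match the current ones, so no add/remove calls are expected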
+ assert my_obj.update_key_server_list(current) is None
+ # delete/add
+ my_obj.parameters['external'] = {
+ 'servers': [
+ {'server': 's4'},
+ {'server': 's2'},
+ {'server': 's5'},
+ ]
+ }
+ my_obj.uuid = '123'
+ assert my_obj.update_key_server_list(current) is None
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ssh.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ssh.py
new file mode 100644
index 000000000..f7723db63
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_security_ssh.py
@@ -0,0 +1,164 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible, call_main
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_security_ssh import main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+SRR = rest_responses({
+ 'ssh_security': (200, {
+ "records": [
+ {
+ "ciphers": [
+ "aes256_ctr",
+ "aes192_ctr",
+ "aes128_ctr"
+ ],
+ "max_authentication_retry_count": 0,
+ "svm": {
+ "name": "ansibleSVM",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "mac_algorithms": ["hmac_sha1", "hmac_sha2_512_etm"],
+ "key_exchange_algorithms": [
+ "diffie_hellman_group_exchange_sha256",
+ "diffie_hellman_group14_sha1"
+ ],
+ }],
+ "num_records": 1
+ }, None),
+ 'ssh_security_no_svm': (200, {
+ "records": [
+ {
+ "ciphers": [
+ "aes256_ctr",
+ ],
+ }],
+ "num_records": 1
+ }, None),
+})
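+# 'ssh_security_no_svm' deliberately omits the svm block, so no uuid can be extracted (see test_module_error_no_svm_uuid)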
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always',
+}
+
+
+def test_get_security_ssh_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ssh/svms', SRR['generic_error']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ssh', SRR['generic_error'])
+ ])
+ module_args = {"vserver": "AnsibleSVM"}
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = 'calling: security/ssh/svms: got Expected error.'
+ assert msg in error
+    error = call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
+    assert 'calling: security/ssh: got Expected error.' in error
+
+
+def test_modify_security_ssh_algorithms_rest():
+ ''' test modify algorithms '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ssh/svms', SRR['ssh_security']),
+ ('PATCH', 'security/ssh/svms/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['empty_good']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ssh', SRR['ssh_security']),
+ ('PATCH', 'security/ssh', SRR['empty_good']),
+ ])
+ module_args = {
+ "vserver": "AnsibleSVM",
+ "ciphers": ["aes256_ctr", "aes192_ctr"],
+ "mac_algorithms": ["hmac_sha1", "hmac_sha2_512_etm"],
+ "key_exchange_algorithms": ["diffie_hellman_group_exchange_sha256"],
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args.pop('vserver')
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_security_ssh_retry_rest():
+ ''' test modify maximum retry count '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ssh/svms', SRR['ssh_security']),
+ ('PATCH', 'security/ssh/svms/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['empty_good']),
+ ])
+ module_args = {
+ "vserver": "AnsibleSVM",
+ "max_authentication_retry_count": 2,
+ }
+    assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_security_ssh_rest():
+ ''' test modify algorithms '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ssh/svms', SRR['ssh_security']),
+ ('PATCH', 'security/ssh/svms/02c9e252-41be-11e9-81d5-00a0986138f7', SRR['generic_error']),
+ ])
+ module_args = {
+ "vserver": "AnsibleSVM",
+ "ciphers": ["aes256_ctr", "aes192_ctr"],
+ "max_authentication_retry_count": 2,
+ "mac_algorithms": ["hmac_sha1", "hmac_sha2_512_etm"],
+ "key_exchange_algorithms": ["diffie_hellman_group_exchange_sha256"],
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = 'calling: security/ssh/svms/02c9e252-41be-11e9-81d5-00a0986138f7: got Expected error.'
+ assert msg in error
+
+
+def test_error_empty_security_ssh_rest():
+ ''' Validation of input parameters '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ module_args = {
+ "ciphers": []
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = 'Removing all SSH ciphers is not supported. SSH login would fail. ' + \
+ 'There must be at least one ciphers associated with the SSH configuration.'
+ assert msg in error
+
+
+def test_module_error_ontap_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ module_args = {'use_rest': 'always'}
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Error: na_ontap_security_ssh only supports REST, and requires ONTAP 9.10.1 or later' in error
+
+
+def test_module_error_no_svm_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/ssh/svms', SRR['ssh_security_no_svm']),
+ ])
+ module_args = {
+ "vserver": "AnsibleSVM",
+ "ciphers": ["aes256_ctr", "aes192_ctr"]
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Error: no uuid found for the SVM' in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_service_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_service_policy.py
new file mode 100644
index 000000000..c11c44059
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_service_policy.py
@@ -0,0 +1,402 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP service policy Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_no_warnings, expect_and_capture_ansible_exception, call_main, create_module, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_service_policy import NetAppOntapServicePolicy as my_module, main as my_main
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always',
+ 'name': 'sp123',
+}
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'one_sp_record': (200, {
+ "records": [{
+ 'name': 'sp123',
+ 'uuid': 'uuid123',
+ 'svm': dict(name='vserver'),
+ 'services': ['data_core'],
+ 'scope': 'svm',
+ 'ipspace': dict(name='ipspace')
+ }],
+ 'num_records': 1
+ }, None),
+ 'two_sp_records': (200, {
+ "records": [
+ {
+ 'name': 'sp123',
+ },
+ {
+ 'name': 'sp124',
+ }],
+ 'num_records': 2
+ }, None),
+}, False)
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ module_args = {
+ 'hostname': ''
+ }
+ error = 'missing required arguments: name'
+ assert error == call_main(my_main, module_args, fail=True)['msg']
+
+
+def test_ensure_get_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['one_sp_record']),
+ ])
+ module_args = {
+ 'services': ['data_core'],
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is False
+ assert_no_warnings()
+
+
+def test_ensure_create_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['zero_records']),
+ ('POST', 'network/ip/service-policies', SRR['empty_good']),
+ ])
+ module_args = {
+ 'services': ['data_core'],
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ assert_no_warnings()
+
+
+def test_ensure_create_called_cluster():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['zero_records']),
+ ('POST', 'network/ip/service-policies', SRR['empty_good']),
+ ])
+ module_args = {
+ 'ipspace': 'ipspace',
+ 'services': ['data_core']
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ assert_no_warnings()
+
+
+def test_ensure_create_idempotent():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['one_sp_record']),
+ ])
+ module_args = {
+ 'services': ['data_core'],
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is False
+ assert_no_warnings()
+
+
+def test_ensure_modify_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['one_sp_record']),
+ ('PATCH', 'network/ip/service-policies/uuid123', SRR['empty_good']),
+ ])
+ module_args = {
+ 'services': ['data_nfs'],
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ assert_no_warnings()
+
+
+def test_ensure_modify_called_no_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['one_sp_record']),
+ ('PATCH', 'network/ip/service-policies/uuid123', SRR['empty_good']),
+ ])
+ module_args = {
+ 'services': ['no_service'],
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ assert_no_warnings()
+
+
+def test_ensure_delete_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['one_sp_record']),
+ ('DELETE', 'network/ip/service-policies/uuid123', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is True
+ assert_no_warnings()
+
+
+def test_ensure_delete_idempotent():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['zero_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'vserver',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed'] is False
+ assert_no_warnings()
+
+
+def test_negative_extra_record():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['two_sp_records']),
+ ])
+ module_args = {
+ 'services': ['data_nfs'],
+ 'vserver': 'vserver',
+ }
+ error = 'Error in get_service_policy: calling: network/ip/service-policies: unexpected response'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_ipspace_required_1():
+ module_args = {
+ 'services': ['data_nfs'],
+ 'vserver': None,
+ }
+ error = "vserver is None but all of the following are missing: ipspace"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_ipspace_required_2():
+ module_args = {
+ 'scope': 'cluster',
+ 'services': ['data_nfs'],
+ 'vserver': None,
+ }
+ error = "scope is cluster but all of the following are missing: ipspace"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_ipspace_required_3():
+ module_args = {
+ 'services': ['data_nfs'],
+ }
+ error = "one of the following is required: ipspace, vserver"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_vserver_required_1():
+ module_args = {
+ 'scope': 'svm',
+ 'services': ['data_nfs'],
+ }
+ error = "one of the following is required: ipspace, vserver"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_vserver_required_2():
+ module_args = {
+ 'ipspace': None,
+ 'scope': 'svm',
+ 'services': ['data_nfs'],
+ }
+ error = "scope is svm but all of the following are missing: vserver"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_vserver_required_3():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'ipspace': None,
+ 'scope': 'svm',
+ 'services': ['data_nfs'],
+ 'vserver': None,
+ }
+ error = 'Error: vserver cannot be None when "scope: svm" is specified.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_vserver_not_required():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'ipspace': None,
+ 'scope': 'cluster',
+ 'services': ['data_nfs'],
+ 'vserver': 'vserver',
+ }
+ error = 'Error: vserver cannot be set when "scope: cluster" is specified. Got: vserver'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_no_service_not_alone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'scope': 'svm',
+ 'services': ['data_nfs', 'no_service'],
+ 'vserver': 'vserver',
+ }
+ error = "Error: no other service can be present when no_service is specified."
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_no_service_not_alone_with_cluster_scope():
+ module_args = {
+ 'ipspace': 'ipspace',
+ 'scope': 'cluster',
+ 'services': ['data_nfs', 'no_service'],
+ 'vserver': 'vserver',
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ error = "Error: no other service can be present when no_service is specified."
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_extra_arg_in_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['one_sp_record']),
+ ])
+ module_args = {
+ 'ipspace': 'ipspace',
+ 'scope': 'cluster',
+ 'services': ['data_nfs'],
+ }
+ error = "Error: attributes not supported in modify: {'scope': 'cluster'}"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_empty_body_in_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'scope': 'svm',
+ 'services': ['data_nfs'],
+ 'vserver': 'vserver',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ current = dict(uuid='')
+ modify = {}
+ error = 'Error: nothing to change - modify called with: {}'
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_service_policy, 'fail', current, modify)['msg']
+ assert_no_warnings()
+
+
+def test_negative_create_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['zero_records']),
+ ('POST', 'network/ip/service-policies', SRR['generic_error']),
+ ])
+ module_args = {
+ 'scope': 'svm',
+ 'services': ['data_nfs'],
+ 'vserver': 'vserver',
+ }
+ error = rest_error_message('Error in create_service_policy', 'network/ip/service-policies')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_delete_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['one_sp_record']),
+ ('DELETE', 'network/ip/service-policies/uuid123', SRR['generic_error']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'vserver',
+ }
+ error = rest_error_message('Error in delete_service_policy', 'network/ip/service-policies/uuid123')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_modify_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'network/ip/service-policies', SRR['one_sp_record']),
+ ('PATCH', 'network/ip/service-policies/uuid123', SRR['generic_error']),
+ ])
+ module_args = {
+ 'services': ['data_nfs'],
+ 'vserver': 'vserver',
+ }
+ error = rest_error_message('Error in modify_service_policy', 'network/ip/service-policies/uuid123')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+
+
+def test_negative_unknown_services():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'services': ['data_nfs9'],
+ 'vserver': 'vserver',
+ }
+ error = 'Error: unknown service: data_nfs9. New services may need to be added to "additional_services".'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert_no_warnings()
+ module_args = {
+ 'services': ['data_nfs9', 'data_cifs', 'dummy'],
+ 'vserver': 'vserver',
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ for needle in ['Error: unknown services:', 'data_nfs9', 'dummy']:
+ assert needle in error
+ assert 'data_cifs' not in error
+ assert_no_warnings()
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_service_processor_network.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_service_processor_network.py
new file mode 100644
index 000000000..c8c249810
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_service_processor_network.py
@@ -0,0 +1,296 @@
+''' unit tests for ONTAP service processor network Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_service_processor_network \
+ import NetAppOntapServiceProcessorNetwork as sp_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def mock_args(enable=False, use_rest=False):
+ data = {
+ 'node': 'test-vsim1',
+ 'is_enabled': enable,
+ 'address_type': 'ipv4',
+ 'hostname': 'host',
+ 'username': 'admin',
+ 'password': 'password',
+ 'use_rest': 'never'
+ }
+ if enable is True:
+ data['ip_address'] = '1.1.1.1'
+ data['gateway_ip_address'] = '2.2.2.2'
+ data['netmask'] = '255.255.248.0'
+ data['dhcp'] = 'none'
+ if use_rest:
+ data['use_rest'] = 'always'
+ return data
+
+
+sp_enabled_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'service-processor-network-info': {
+ 'node': 'test-vsim1',
+ 'is-enabled': 'true',
+ 'address-type': 'ipv4',
+ 'dhcp': 'v4',
+ 'gateway-ip-address': '2.2.2.2',
+ 'netmask': '255.255.248.0',
+ 'ip-address': '1.1.1.1',
+ 'setup-status': 'succeeded'
+ }
+ }
+}
+
+sp_disabled_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'service-processor-network-info': {
+ 'node-name': 'test-vsim1',
+ 'is-enabled': 'false',
+ 'address-type': 'ipv4',
+ 'setup-status': 'not_setup'
+ }
+ }
+}
+
+sp_status_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'service-processor-network-info': {
+ 'node-name': 'test-vsim1',
+ 'is-enabled': 'false',
+ 'address-type': 'ipv4',
+ 'setup-status': 'in_progress'
+ }
+ }
+}
+
+ZRR = zapi_responses({
+ 'sp_enabled_info': build_zapi_response(sp_enabled_info),
+ 'sp_disabled_info': build_zapi_response(sp_disabled_info),
+ 'sp_status_info': build_zapi_response(sp_status_info)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "node", "address_type"]
+ error = create_module(sp_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_modify_error_on_disabled_sp():
+    ''' modifying a service processor network that is disabled is rejected in ZAPI '''
+ register_responses([
+ ('service-processor-network-get-iter', ZRR['sp_disabled_info'])
+ ])
+ error = 'Error: Cannot modify a service processor network if it is disabled in ZAPI'
+ assert error in create_and_apply(sp_module, mock_args(), {'ip_address': '1.1.1.1'}, 'error')['msg']
+
+
+def test_modify_error_on_disable_dhcp_without_ip():
+    ''' disabling dhcp requires ip_address, netmask and gateway to be set manually '''
+ register_responses([
+ ('service-processor-network-get-iter', ZRR['sp_enabled_info'])
+ ])
+ error = 'Error: To disable dhcp, configure ip-address, netmask and gateway details manually.'
+ assert error in create_and_apply(sp_module, mock_args(enable=True), None, fail=True)['msg']
+
+
+def test_modify_error_of_params_disabled_false():
+    ''' no other parameter can be modified when is_enabled is false '''
+ register_responses([
+ ('service-processor-network-get-iter', ZRR['sp_enabled_info'])
+ ])
+ error = 'Error: Cannot modify any other parameter for a service processor network if option "is_enabled" is set to false.'
+ assert error in create_and_apply(sp_module, mock_args(), {'ip_address': '2.1.1.1'}, 'error')['msg']
+
+
+def test_modify_sp():
+    ''' modify the service processor network ip address '''
+ register_responses([
+ ('service-processor-network-get-iter', ZRR['sp_enabled_info']),
+ ('service-processor-network-modify', ZRR['success'])
+ ])
+ assert create_and_apply(sp_module, mock_args(enable=True), {'ip_address': '3.3.3.3'})['changed']
+
+
+@patch('time.sleep')
+def test_modify_sp_wait(sleep):
+    ''' modify the service processor network and wait for completion '''
+ register_responses([
+ ('service-processor-network-get-iter', ZRR['sp_enabled_info']),
+ ('service-processor-network-modify', ZRR['success']),
+ ('service-processor-network-get-iter', ZRR['sp_enabled_info'])
+ ])
+ args = {'ip_address': '3.3.3.3', 'wait_for_completion': True}
+ assert create_and_apply(sp_module, mock_args(enable=True), args)['changed']
+
+
+def test_non_existing_sp():
+ register_responses([
+ ('service-processor-network-get-iter', ZRR['no_records'])
+ ])
+ error = 'Error No Service Processor for node: test-vsim1'
+    assert error in create_and_apply(sp_module, mock_args(), fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_wait_on_sp_status(sleep):
+ register_responses([
+ ('service-processor-network-get-iter', ZRR['sp_enabled_info']),
+ ('service-processor-network-modify', ZRR['success']),
+ ('service-processor-network-get-iter', ZRR['sp_status_info']),
+ ('service-processor-network-get-iter', ZRR['sp_status_info']),
+ ('service-processor-network-get-iter', ZRR['sp_status_info']),
+ ('service-processor-network-get-iter', ZRR['sp_status_info']),
+ ('service-processor-network-get-iter', ZRR['sp_enabled_info'])
+ ])
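+    # the repeated 'in_progress' responses simulate polling until setup-status reports the network as configured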
+ args = {'ip_address': '3.3.3.3', 'wait_for_completion': True}
+ assert create_and_apply(sp_module, mock_args(enable=True), args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ ''' test error zapi - get/modify'''
+ register_responses([
+ ('service-processor-network-get-iter', ZRR['error']),
+ ('service-processor-network-get-iter', ZRR['error']),
+ ('service-processor-network-modify', ZRR['error'])
+ ])
+ sp_obj = create_module(sp_module, mock_args())
+
+ assert 'Error fetching service processor network info' in expect_and_capture_ansible_exception(sp_obj.get_service_processor_network, 'fail')['msg']
+ assert 'Error fetching service processor network status' in expect_and_capture_ansible_exception(sp_obj.get_sp_network_status, 'fail')['msg']
+ assert 'Error modifying service processor network' in expect_and_capture_ansible_exception(sp_obj.modify_service_processor_network, 'fail', {})['msg']
+
+
+SRR = rest_responses({
+ 'sp_enabled_info': (200, {"records": [{
+ 'name': 'ansdev-stor-1',
+ 'service_processor': {
+ 'dhcp_enabled': False,
+ 'firmware_version': '3.10',
+ 'ipv4_interface': {
+ 'address': '1.1.1.1',
+ 'gateway': '2.2.2.2',
+ 'netmask': '255.255.248.0'
+ },
+ 'link_status': 'up',
+ 'state': 'online'
+ },
+ 'uuid': '5dd7aed0'}
+ ]}, None),
+ 'sp_disabled_info': (200, {"records": [{
+ 'name': 'ansdev-stor-1',
+ 'service_processor': {
+ 'firmware_version': '3.10',
+ 'link_status': 'up',
+ 'state': 'online'
+ },
+ 'uuid': '5dd7aed0'}
+ ]}, None)
+})
+
+
+def test_modify_sp_rest():
+ ''' modify sp in rest '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster/nodes', SRR['sp_enabled_info']),
+ ('PATCH', 'cluster/nodes/5dd7aed0', SRR['success'])
+ ])
+ assert create_and_apply(sp_module, mock_args(enable=True, use_rest=True), {'ip_address': '3.3.3.3'})['changed']
+
+
+def test_non_existing_sp_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster/nodes', SRR['empty_records'])
+ ])
+ error = 'Error No Service Processor for node: test-vsim1'
+    assert error in create_and_apply(sp_module, mock_args(enable=True, use_rest=True), fail=True)['msg']
+
+
+def test_if_all_methods_catch_exception_rest():
+    ''' test error rest - get/modify '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster/nodes', SRR['generic_error']),
+ ('PATCH', 'cluster/nodes/5dd7aed0', SRR['generic_error'])
+ ])
+ sp_obj = create_module(sp_module, mock_args(use_rest=True))
+ sp_obj.uuid = '5dd7aed0'
+ assert 'Error fetching service processor network info' in expect_and_capture_ansible_exception(sp_obj.get_service_processor_network, 'fail')['msg']
+ assert 'Error modifying service processor network' in expect_and_capture_ansible_exception(sp_obj.modify_service_processor_network, 'fail', {})['msg']
+
+
+def test_disable_sp_rest():
+ ''' disable not supported in REST '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster/nodes', SRR['sp_enabled_info'])
+ ])
+ error = 'Error: disable service processor network status not allowed in REST'
+ assert error in create_and_apply(sp_module, mock_args(enable=True, use_rest=True), {'is_enabled': False}, 'fail')['msg']
+
+
+def test_enable_sp_rest_without_ip_or_dhcp():
+ ''' enable requires ip or dhcp in REST '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster/nodes', SRR['sp_disabled_info'])
+ ])
+ error = 'Error: enable service processor network requires dhcp or ip_address,netmask,gateway details in REST.'
+ assert error in create_and_apply(sp_module, mock_args(use_rest=True), {'is_enabled': True}, 'fail')['msg']
+
+
+@patch('time.sleep')
+def test_wait_on_sp_status_rest(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'cluster/nodes', SRR['sp_disabled_info']),
+ ('PATCH', 'cluster/nodes/5dd7aed0', SRR['success']),
+ ('GET', 'cluster/nodes', SRR['sp_disabled_info']),
+ ('GET', 'cluster/nodes', SRR['sp_disabled_info']),
+ ('GET', 'cluster/nodes', SRR['sp_enabled_info'])
+ ])
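+    # the repeated disabled records simulate polling until the GET reports the ipv4 interface as configured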
+ args = {'ip_address': '1.1.1.1', 'wait_for_completion': True}
+ assert create_and_apply(sp_module, mock_args(enable=True, use_rest=True), args)['changed']
+
+
+def test_error_dhcp_for_address_type_ipv6():
+    ''' dhcp v4 cannot be set when address_type is ipv6 '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1'])
+ ])
+ error = 'Error: dhcp cannot be set for address_type: ipv6'
+ args = {'address_type': 'ipv6', 'dhcp': 'v4'}
+ assert error in create_module(sp_module, mock_args(use_rest=True), args, fail=True)['msg']
+
+
+def test_error_dhcp_enable_and_set_manual_options_rest():
+    ''' dhcp v4 and manual interface options cannot be set together '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1'])
+ ])
+ error = "Error: set dhcp v4 or all of 'ip_address, gateway_ip_address, netmask'."
+ args = {'dhcp': 'v4'}
+ assert error in create_module(sp_module, mock_args(use_rest=True, enable=True), args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snaplock_clock.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snaplock_clock.py
new file mode 100644
index 000000000..6177f2a29
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snaplock_clock.py
@@ -0,0 +1,228 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP snaplock clock Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_snaplock_clock \
+ import NetAppOntapSnaplockClock as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection():
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'snaplock_clock_set':
+ xml = self.build_snaplock_clock_info_set()
+ elif self.type == 'snaplock_clock_not_set':
+ xml = self.build_snaplock_clock_info_not_set()
+ elif self.type == 'snaplock_clock_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_snaplock_clock_info_set():
+ ''' build xml data for snaplock-get-node-compliance-clock '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'snaplock-node-compliance-clock': {
+ 'compliance-clock-info': {
+ 'formatted-snaplock-compliance-clock': 'Tue Mar 23 09:56:07 EDT 2021 -04:00'
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_snaplock_clock_info_not_set():
+ ''' build xml data for snaplock-get-node-compliance-clock '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'snaplock-node-compliance-clock': {
+ 'compliance-clock-info': {
+ 'formatted-snaplock-compliance-clock': 'ComplianceClock is not configured.'
+ }
+ }
+ }
+ xml.translate_struct(data)
+ return xml
+
+
+def default_args():
+ args = {
+ 'node': 'node1',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'snaplock_clock_set_record': (200, {
+ "records": [{
+ 'node': 'node1',
+ 'time': 'Tue Mar 23 09:56:07 EDT 2021 -04:00'
+ }],
+ 'num_records': 1
+ }, None),
+ 'snaplock_clock_not_set_record': (200, {
+ "records": [{
+ 'node': 'node1',
+ 'time': 'ComplianceClock is not configured.'
+ }],
+ 'num_records': 1
+ }, None)
+
+}
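+# the 'not_set' record drives initialization; the 'set' record is treated as already initialized (idempotent)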
+
+
+def get_snaplock_clock_mock_object(cx_type='zapi', kind=None):
+ snaplock_clock_obj = my_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ snaplock_clock_obj.server = MockONTAPConnection()
+ else:
+ snaplock_clock_obj.server = MockONTAPConnection(kind=kind)
+ return snaplock_clock_obj
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_ensure_get_called(patch_ansible):
+ ''' test get_snaplock_clock for non initialized clock'''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ print('starting')
+ my_obj = my_module()
+ print('use_rest:', my_obj.use_rest)
+ my_obj.server = MockONTAPConnection(kind='snaplock_clock_not_set')
+    assert my_obj.get_snaplock_node_compliance_clock() is not None
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' test for missing arguments '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snaplock_clock.NetAppOntapSnaplockClock.set_snaplock_node_compliance_clock')
+def test_successful_initialize(mock_set_clock, patch_ansible):
+ ''' Initializing snaplock_clock and test idempotency '''
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['feature_flags'] = {'no_cserver_ems': True}
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection(kind='snaplock_clock_not_set')
+ with patch.object(my_module, 'set_snaplock_node_compliance_clock', wraps=my_obj.set_snaplock_node_compliance_clock) as mock_create:
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert exc.value.args[0]['changed']
+ mock_create.assert_called_with()
+ # test idempotency
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ args['feature_flags'] = {'no_cserver_ems': True}
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('snaplock_clock_set')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Create: ' + repr(exc.value))
+ assert not exc.value.args[0]['changed']
+
+
+def test_if_all_methods_catch_exception(patch_ansible):
+ args = dict(default_args())
+ args['use_rest'] = 'never'
+ set_module_args(args)
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection('snaplock_clock_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.set_snaplock_node_compliance_clock()
+ assert 'Error setting snaplock compliance clock for node ' in exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_initialize(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Initialize snaplock clock '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['snaplock_clock_not_set_record'], # get
+ SRR['empty_good'], # post
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_initialize_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Initialize snaplock clock idempotent '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['snaplock_clock_set_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapmirror.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapmirror.py
new file mode 100644
index 000000000..9ba179279
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapmirror.py
@@ -0,0 +1,1894 @@
+''' unit tests ONTAP Ansible module: na_ontap_snapmirror '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_no_warnings, assert_warning_was_raised, expect_and_capture_ansible_exception, call_main, create_module, patch_ansible, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapmirror \
+ import NetAppONTAPSnapmirror as my_module, main as my_main
+
+HAS_SF_COMMON = True
+try:
+ from solidfire.common import ApiServerError
+except ImportError:
+ HAS_SF_COMMON = False
+
+if not HAS_SF_COMMON:
+ pytestmark = pytest.mark.skip('skipping as missing required solidfire.common')
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+DEFAULT_ARGS = {
+ "hostname": "10.193.189.206",
+ "username": "admin",
+ "password": "netapp123",
+ "https": "yes",
+ "validate_certs": "no",
+ "state": "present",
+ "initialize": "True",
+ "relationship_state": "active",
+ "source_path": "svmsrc3:volsrc1",
+ "destination_path": "svmdst3:voldst1",
+ "relationship_type": "extended_data_protection"
+}
+
+
+def sm_rest_info(state, healthy, transfer_state=None, destination_path=DEFAULT_ARGS['destination_path']):
+ record = {
+ 'uuid': 'b5ee4571-5429-11ec-9779-005056b39a06',
+ 'destination': {
+ 'path': destination_path
+ },
+ 'policy': {
+ 'name': 'MirrorAndVault'
+ },
+ 'state': state,
+ 'healthy': healthy,
+ }
+ if transfer_state:
+ record['transfer'] = {'state': transfer_state}
+ if transfer_state == 'transferring':
+ record['transfer']['uuid'] = 'xfer_uuid'
+ if healthy is False:
+ record['unhealthy_reason'] = 'this is why the relationship is not healthy.'
+ record['transfer_schedule'] = {'name': 'abc'}
+
+ return {
+ 'records': [record],
+ 'num_records': 1
+ }
+
+
+sm_policies = {
+ # We query only on the policy name, as it can be at the vserver or cluster scope.
+ # So we can have ghost records from other SVMs.
+ 'records': [
+ {
+ 'type': 'sync',
+ 'svm': {'name': 'other'}
+ },
+ {
+ 'type': 'async',
+ 'svm': {'name': 'svmdst3'}
+ },
+ {
+ 'type': 'svm_invalid',
+ 'svm': {'name': 'bad_type'}
+ },
+ {
+ 'type': 'system_invalid',
+ },
+ ],
+ 'num_records': 4,
+}
+
+
+svm_peer_info = {
+ 'records': [{
+ 'peer': {
+ 'svm': {'name': 'vserver'},
+ 'cluster': {'name': 'cluster'},
+ }
+ }]
+}
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'sm_get_uninitialized': (200, sm_rest_info('uninitialized', True), None),
+ 'sm_get_uninitialized_xfering': (200, sm_rest_info('uninitialized', True, 'transferring'), None),
+ 'sm_get_mirrored': (200, sm_rest_info('snapmirrored', True, 'success'), None),
+ 'sm_get_restore': (200, sm_rest_info('snapmirrored', True, 'success', destination_path=DEFAULT_ARGS['source_path']), None),
+ 'sm_get_paused': (200, sm_rest_info('paused', True, 'success'), None),
+ 'sm_get_broken': (200, sm_rest_info('broken_off', True, 'success'), None),
+ 'sm_get_data_transferring': (200, sm_rest_info('transferring', True, 'transferring'), None),
+ 'sm_get_abort': (200, sm_rest_info('sm_get_abort', False, 'failed'), None),
+ 'sm_get_resync': (200, {
+ 'uuid': 'b5ee4571-5429-11ec-9779-005056b39a06',
+ 'description': 'PATCH /api/snapmirror/relationships/1c4467ca-5434-11ec-9779-005056b39a06',
+ 'state': 'success',
+ 'message': 'success',
+ 'code': 0,
+ }, None),
+ 'job_status': (201, {
+ 'job': {
+ 'uuid': '3a23a60e-542c-11ec-9779-005056b39a06',
+ '_links': {
+ 'self': {
+ 'href': '/api/cluster/jobs/3a23a60e-542c-11ec-9779-005056b39a06'
+ }
+ }
+ }
+ }, None),
+ 'sm_policies': (200, sm_policies, None),
+ 'svm_peer_info': (200, svm_peer_info, None),
+})
+
+
+def sm_info(mirror_state, status, quiesce_status, relationship_type='extended_data_protection', source='ansible:volsrc1'):
+
+ return {
+ 'num-records': 1,
+ 'status': quiesce_status,
+ 'attributes-list': {
+ 'snapmirror-info': {
+ 'mirror-state': mirror_state,
+ 'schedule': None,
+ 'source-location': source,
+ 'relationship-status': status,
+ 'policy': 'ansible_policy',
+ 'relationship-type': relationship_type,
+ 'max-transfer-rate': 10000,
+ 'identity-preserve': 'true',
+ 'last-transfer-error': 'last_transfer_error',
+ 'is-healthy': 'true',
+ 'unhealthy-reason': 'unhealthy_reason',
+ },
+ 'snapmirror-destination-info': {
+ 'destination-location': 'ansible'
+ }
+ }
+ }
+
+
+# we only test for existence, contents do not matter
+volume_info = {
+ 'num-records': 1,
+}
+
+
+vserver_peer_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'vserver-peer-info': {
+ 'remote-vserver-name': 'svmsrc3',
+ 'peer-cluster': 'cluster',
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'sm_info': build_zapi_response(sm_info(None, 'idle', 'passed')),
+ 'sm_info_broken_off': build_zapi_response(sm_info('broken_off', 'idle', 'passed')),
+ 'sm_info_snapmirrored': build_zapi_response(sm_info('snapmirrored', 'idle', 'passed')),
+ 'sm_info_snapmirrored_from_element': build_zapi_response(sm_info('snapmirrored', 'idle', 'passed', source='10.10.10.11:/lun/1000')),
+ 'sm_info_snapmirrored_to_element': build_zapi_response(sm_info('snapmirrored', 'idle', 'passed', source='svmsrc3:volsrc1')),
+ 'sm_info_snapmirrored_load_sharing': build_zapi_response(sm_info('snapmirrored', 'idle', 'passed', 'load_sharing')),
+ 'sm_info_snapmirrored_vault': build_zapi_response(sm_info('snapmirrored', 'idle', 'passed', 'vault')),
+ 'sm_info_snapmirrored_quiesced': build_zapi_response(sm_info('snapmirrored', 'quiesced', 'passed')),
+ 'sm_info_uninitialized': build_zapi_response(sm_info('uninitialized', 'idle', 'passed')),
+ 'sm_info_uninitialized_load_sharing': build_zapi_response(sm_info('uninitialized', 'idle', 'passed', 'load_sharing')),
+ 'volume_info': build_zapi_response(volume_info),
+ 'vserver_peer_info': build_zapi_response(vserver_peer_info)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ msg = "missing required arguments: hostname"
+ assert create_module(my_module, {}, fail=True)['msg'] == msg
+
+
+def test_module_fail_unsupported_rest_options():
+    ''' REST-only options are rejected when use_rest is set to never '''
+ module_args = {
+ "use_rest": "never",
+ "create_destination": {"enabled": True},
+ }
+ errors = [
+ 'Error: using any of',
+ 'create_destination',
+ 'requires ONTAP 9.7 or later and REST must be enabled - using ZAPI.'
+ ]
+ for error in errors:
+ assert error in create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
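+# ZRR entries may not be usable when netapp_lib is missing (the tests themselves are skipped via pytestmark above),
+# so only build the shared create sequence when the library is present.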
+if netapp_utils.has_netapp_lib():
+ zapi_create_responses = [
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']), # ONTAP to ONTAP
+ ('ZAPI', 'snapmirror-create', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-initialize', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check status
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ]
+else:
+ zapi_create_responses = []
+
+
+def test_negative_zapi_unsupported_options():
+ ''' ZAPI unsupported options '''
+ register_responses([
+ ])
+ module_args = {
+ "use_rest": "never",
+ "identity_preservation": "full"
+ }
+ msg = "Error: The option identity_preservation is supported only with REST."
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+@patch('time.sleep')
+def test_successful_create_with_source(dont_sleep):
+ ''' creating snapmirror and testing idempotency '''
+    # older Python versions do not support [*zapi_create_responses, ...] unpacking, so copy the list instead
+ responses = list(zapi_create_responses)
+ responses.extend([
+ # idempotency
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # ONTAP to ONTAP
+ ('ZAPI', 'vserver-peer-get-iter', ZRR['vserver_peer_info']), # validate source svm
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # ONTAP to ONTAP, check for update
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ register_responses(responses)
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "10.10.10.10",
+ "schedule": "abc",
+ "identity_preserve": True,
+ "relationship_type": "data_protection",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args.pop('schedule')
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_successful_create_with_peer(dont_sleep):
+ ''' creating snapmirror and testing idempotency '''
+ register_responses(zapi_create_responses)
+ module_args = {
+ "use_rest": "never",
+ "peer_options": {"hostname": "10.10.10.10"},
+ "schedule": "abc",
+ "identity_preserve": True,
+ "relationship_type": "data_protection",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_negative_break(dont_sleep):
+ ''' breaking snapmirror to test quiesce time-delay failure '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'vserver-peer-get-iter', ZRR['vserver_peer_info']), # validate source svm
+ ('ZAPI', 'snapmirror-quiesce', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # 5 retries
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "10.10.10.10",
+ "relationship_state": "broken",
+ "relationship_type": "data_protection",
+ }
+ msg = "Taking a long time to quiesce SnapMirror relationship, try again later"
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+@patch('time.sleep')
+def test_successful_break(dont_sleep):
+ ''' breaking snapmirror and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'vserver-peer-get-iter', ZRR['vserver_peer_info']), # validate source svm
+ ('ZAPI', 'snapmirror-quiesce', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_quiesced']),
+ ('ZAPI', 'snapmirror-break', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # idempotency
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_broken_off']),
+ ('ZAPI', 'vserver-peer-get-iter', ZRR['vserver_peer_info']), # validate source svm
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "10.10.10.10",
+ "relationship_state": "broken",
+ "relationship_type": "data_protection",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_create_without_initialize():
+ ''' creating snapmirror and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']), # ONTAP to ONTAP
+ ('ZAPI', 'snapmirror-create', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # idempotency
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # ONTAP to ONTAP
+ ('ZAPI', 'vserver-peer-get-iter', ZRR['vserver_peer_info']), # validate source svm
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # ONTAP to ONTAP, check for update
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "10.10.10.10",
+ "schedule": "abc",
+ "relationship_type": "data_protection",
+ "initialize": False,
+ "policy": 'ansible_policy',
+ "max_transfer_rate": 10000,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args.pop('schedule')
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_set_source_peer():
+ module_args = {
+ 'connection_type': 'ontap_elementsw'
+ }
+ error = 'Error: peer_options are required to identify ONTAP cluster with connection_type: ontap_elementsw'
+ assert error in create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args = {
+ 'connection_type': 'elementsw_ontap'
+ }
+ error = 'Error: peer_options are required to identify SolidFire cluster with connection_type: elementsw_ontap'
+ assert error in create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.create_sf_connection')
+def test_set_element_connection(mock_create_sf_cx):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'peer_options': {'hostname': 'any'}
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ elementsw_helper, elem = my_obj.set_element_connection('source')
+ assert elementsw_helper is not None
+ assert elem is not None
+ elementsw_helper, elem = my_obj.set_element_connection('destination')
+ assert elementsw_helper is not None
+ assert elem is not None
+
+
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapmirror.NetAppONTAPSnapmirror.set_element_connection')
+def test_successful_element_ontap_create(connection, dont_sleep):
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']), # element to ONTAP
+ ('ZAPI', 'snapmirror-create', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-initialize', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check status
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # idempotency
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_from_element']), # element to ONTAP
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # element to ONTAP, check for update
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ mock_elem, mock_helper = Mock(), Mock()
+ connection.return_value = mock_helper, mock_elem
+ mock_elem.get_cluster_info.return_value.cluster_info.svip = '10.10.10.11'
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "10.10.10.10",
+ "connection_type": "elementsw_ontap",
+ "schedule": "abc",
+ "source_path": "10.10.10.11:/lun/1000",
+ "relationship_type": "data_protection",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args.pop('schedule')
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapmirror.NetAppONTAPSnapmirror.set_element_connection')
+def test_successful_ontap_element_create(connection, dont_sleep):
+ ''' check elementsw parameters for source '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # an existing relationship is required element to ONTAP
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']), # ONTAP to element
+ ('ZAPI', 'snapmirror-create', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-initialize', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check status
+ # idempotency
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # an existing relationship is required element to ONTAP
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_to_element']), # ONTAP to element
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # ONTAP to element, check for update
+ ])
+ mock_elem, mock_helper = Mock(), Mock()
+ connection.return_value = mock_helper, mock_elem
+ mock_elem.get_cluster_info.return_value.cluster_info.svip = '10.10.10.11'
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "10.10.10.10",
+ "connection_type": "ontap_elementsw",
+ "schedule": "abc",
+ "destination_path": "10.10.10.11:/lun/1000",
+ "relationship_type": "data_protection",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args.pop('schedule')
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_successful_delete(dont_sleep):
+ ''' deleting snapmirror and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'vserver-peer-get-iter', ZRR['vserver_peer_info']), # validate source svm
+ ('ZAPI', 'snapmirror-quiesce', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_quiesced']),
+ ('ZAPI', 'snapmirror-break', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-destination-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-release', ZRR['success']),
+ ('ZAPI', 'snapmirror-destroy', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']), # check health
+ # idempotency
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ "source_hostname": "10.10.10.10",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_successful_delete_without_source_hostname_check(dont_sleep):
+ ''' source cluster hostname is optional when source is unknown'''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'vserver-peer-get-iter', ZRR['vserver_peer_info']), # validate source svm
+ ('ZAPI', 'snapmirror-quiesce', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_quiesced']),
+ ('ZAPI', 'snapmirror-break', ZRR['success']),
+ ('ZAPI', 'snapmirror-destroy', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_successful_delete_with_error_on_break(dont_sleep):
+ ''' source cluster hostname is optional when source is unknown'''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-quiesce', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_quiesced']),
+ ('ZAPI', 'snapmirror-break', ZRR['error']),
+ ('ZAPI', 'snapmirror-destroy', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ "validate_source_path": False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised('Ignored error(s): Error breaking SnapMirror relationship: NetApp API failed. Reason - 12345:synthetic error for UT purpose')
+
+
+@patch('time.sleep')
+def test_negative_delete_error_with_error_on_break(dont_sleep):
+ ''' delete fails and reports both the break and destroy errors '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-quiesce', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_quiesced']),
+ ('ZAPI', 'snapmirror-break', ZRR['error']),
+ ('ZAPI', 'snapmirror-destroy', ZRR['error']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ "validate_source_path": False
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Previous error(s): Error breaking SnapMirror relationship: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+ assert 'Error deleting SnapMirror:' in error
+
+
+def test_negative_delete_with_destination_path_missing():
+ ''' with missing destination_path '''
+ register_responses([
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('destination_path')
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ "source_hostname": "source_host",
+ }
+ msg = "Missing parameters: Source path or Destination path"
+ assert call_main(my_main, args, module_args, fail=True)['msg'] == msg
+
+
+def test_successful_delete_check_get_destination():
+ register_responses([
+ ('ZAPI', 'snapmirror-get-destination-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-destination-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ "source_hostname": "source_host",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.set_source_cluster_connection() is None
+ assert my_obj.get_destination()
+ assert my_obj.get_destination() is None
+
+
+def test_snapmirror_release():
+ register_responses([
+ ('ZAPI', 'snapmirror-release', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_volume": "source_volume",
+ "source_vserver": "source_vserver",
+ "destination_volume": "destination_volume",
+ "destination_vserver": "destination_vserver",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.set_source_cluster_connection() is None
+ assert my_obj.snapmirror_release() is None
+
+
+def test_snapmirror_resume():
+ ''' resuming snapmirror '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_quiesced']),
+ ('ZAPI', 'snapmirror-resume', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # update reads mirror_state
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # idempotency test
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # update reads mirror_state
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": "data_protection",
+ "validate_source_path": False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_snapmirror_restore():
+ ''' restore snapmirror '''
+ register_responses([
+ ('ZAPI', 'snapmirror-restore', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # idempotency test - TODO
+ ('ZAPI', 'snapmirror-restore', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": "restore",
+ "source_snapshot": "source_snapshot",
+ "clean_up_failure": True,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # TODO: should be idempotent! But we don't read the current state!
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_successful_abort(dont_sleep):
+ ''' aborting snapmirror and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-quiesce', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_quiesced']),
+ ('ZAPI', 'snapmirror-break', ZRR['success']),
+ ('ZAPI', 'snapmirror-destroy', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # idempotency test
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ "validate_source_path": False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify():
+ ''' modifying snapmirror and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-modify', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # update reads mirror_state
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # idempotency test
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # update reads mirror_state
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": "data_protection",
+ "policy": "ansible2",
+ "schedule": "abc2",
+ "max_transfer_rate": 2000,
+ "validate_source_path": False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": "data_protection",
+ "validate_source_path": False
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_successful_initialize(dont_sleep):
+ ''' initialize snapmirror for data_protection and load_sharing relationships '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_uninitialized']),
+ ('ZAPI', 'snapmirror-initialize', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check status
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # update reads mirror_state
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # 2nd run
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_uninitialized_load_sharing']),
+ ('ZAPI', 'snapmirror-initialize-ls-set', ZRR['success']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check status
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # update reads mirror_state
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": "data_protection",
+ "validate_source_path": False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": "load_sharing",
+ "validate_source_path": False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_update():
+ ''' update snapmirror for data_protection and load_sharing relationships '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored']), # update reads mirror_state
+ ('ZAPI', 'snapmirror-update', ZRR['success']), # update
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ # 2nd run
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_load_sharing']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_load_sharing']), # update reads mirror_state
+ ('ZAPI', 'snapmirror-update-ls-set', ZRR['success']), # update
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info']), # check health
+ ])
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": "data_protection",
+ "validate_source_path": False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": "load_sharing",
+ "validate_source_path": False
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapmirror.NetAppONTAPSnapmirror.set_element_connection')
+def test_elementsw_no_source_path(connection):
+ ''' error when no ElementSW to ONTAP relationship exists '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['no_records']),
+ ])
+ mock_elem, mock_helper = Mock(), Mock()
+ connection.return_value = mock_helper, mock_elem
+ mock_elem.get_cluster_info.return_value.cluster_info.svip = '10.11.12.13'
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_username": "source_user",
+ "connection_type": "ontap_elementsw",
+ "destination_path": "10.11.12.13:/lun/1234"
+ }
+ error = 'Error: creating an ONTAP to ElementSW snapmirror relationship requires an established SnapMirror relation from ElementSW to ONTAP cluster'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_elementsw_volume_exists():
+ ''' elementsw_volume_exists '''
+ mock_helper = Mock()
+ mock_helper.volume_id_exists.side_effect = [1000, None]
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_username": "source_user",
+ "source_path": "10.10.10.10:/lun/1000",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.check_if_elementsw_volume_exists('10.10.10.10:/lun/1000', mock_helper) is None
+ expect_and_capture_ansible_exception(my_obj.check_if_elementsw_volume_exists, 'fail', '10.10.10.11:/lun/1000', mock_helper)
+ mock_helper.volume_id_exists.side_effect = ApiServerError('function_name', {})
+ error = 'Error fetching Volume details'
+ assert error in expect_and_capture_ansible_exception(my_obj.check_if_elementsw_volume_exists, 'fail', '1234', mock_helper)['msg']
+
+
+def test_elementsw_svip_exists():
+ ''' svip_exists '''
+ mock_elem = Mock()
+ mock_elem.get_cluster_info.return_value.cluster_info.svip = '10.10.10.10'
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_username": "source_user",
+ # "source_password": "source_password",
+ "source_path": "10.10.10.10:/lun/1000",
+ # "source_volume": "source_volume",
+ # "source_vserver": "source_vserver",
+ # "destination_volume": "destination_volume",
+ # "destination_vserver": "destination_vserver",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.validate_elementsw_svip('10.10.10.10:/lun/1000', mock_elem) is None
+
+
+def test_elementsw_svip_exists_negative():
+ ''' svip_exists negative testing'''
+ mock_elem = Mock()
+ mock_elem.get_cluster_info.return_value.cluster_info.svip = '10.10.10.10'
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_username": "source_user",
+ "source_path": "10.10.10.10:/lun/1000",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ expect_and_capture_ansible_exception(my_obj.validate_elementsw_svip, 'fail', '10.10.10.11:/lun/1000', mock_elem)
+ mock_elem.get_cluster_info.side_effect = ApiServerError('function_name', {})
+ error = 'Error fetching SVIP'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_elementsw_svip, 'fail', 'svip', mock_elem)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapmirror.NetAppONTAPSnapmirror.set_element_connection')
+def test_check_elementsw_params_source(connection):
+ ''' check elementsw parameters for source '''
+ mock_elem, mock_helper = Mock(), Mock()
+ connection.return_value = mock_helper, mock_elem
+ mock_elem.get_cluster_info.return_value.cluster_info.svip = '10.10.10.10'
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_username": "source_user",
+ "source_path": "10.10.10.10:/lun/1000",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.check_elementsw_parameters('source') is None
+
+
+def test_check_elementsw_params_negative():
+ ''' check elementsw parameters for source negative testing '''
+ args = dict(DEFAULT_ARGS)
+ del args['source_path']
+ module_args = {
+ "use_rest": "never",
+ }
+ msg = 'Error: Missing required parameter source_path'
+ my_obj = create_module(my_module, args, module_args)
+ assert msg in expect_and_capture_ansible_exception(my_obj.check_elementsw_parameters, 'fail', 'source')['msg']
+
+
+def test_check_elementsw_params_invalid():
+ ''' check elementsw parameters for source invalid testing '''
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_volume": "source_volume",
+ "source_vserver": "source_vserver",
+ "destination_volume": "destination_volume",
+ "destination_vserver": "destination_vserver",
+ }
+ msg = 'Error: invalid source_path'
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert msg in expect_and_capture_ansible_exception(my_obj.check_elementsw_parameters, 'fail', 'source')['msg']
+
+
+def test_elementsw_source_path_format():
+ ''' test element_source_path_format_matches '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['volume_info']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_volume": "source_volume",
+ "source_vserver": "source_vserver",
+ "destination_volume": "destination_volume",
+ "destination_vserver": "destination_vserver",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.check_if_remote_volume_exists()
+ assert my_obj.element_source_path_format_matches('1.1.1.1:dummy') is None
+ assert my_obj.element_source_path_format_matches('10.10.10.10:/lun/10') is not None
+
+
+def test_remote_volume_exists():
+ ''' test check_if_remote_volume_exists '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['volume_info']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_volume": "source_volume",
+ "source_vserver": "source_vserver",
+ "destination_volume": "destination_volume",
+ "destination_vserver": "destination_vserver",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.check_if_remote_volume_exists()
+
+
+@patch('time.sleep')
+def test_if_all_methods_catch_exception(dont_sleep):
+ module_args = {
+ "use_rest": "never",
+ "source_hostname": "source_host",
+ "source_volume": "source_volume",
+ "source_vserver": "source_vserver",
+ "destination_volume": "destination_volume",
+ "destination_vserver": "destination_vserver",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.source_server = my_obj.server # for get_destination
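+ # each entry: (method to call, list of (ZAPI name, ZRR response key) pairs, expected error substring)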
+ tests = [
+ (my_obj.check_if_remote_volume_exists, [('volume-get-iter', 'error')], 'Error fetching source volume details source_volume:'),
+ (my_obj.get_destination, [('snapmirror-get-destination-iter', 'error')], 'Error fetching snapmirror destinations info:'),
+ (my_obj.get_svm_peer, [('vserver-peer-get-iter', 'error')], 'Error fetching vserver peer info:'),
+ (my_obj.snapmirror_abort, [('snapmirror-abort', 'error')], 'Error aborting SnapMirror relationship:'),
+ (my_obj.snapmirror_break, [('snapmirror-quiesce', 'success'), ('snapmirror-get-iter', 'sm_info_snapmirrored_quiesced'), ('snapmirror-break', 'error')],
+ 'Error breaking SnapMirror relationship:'),
+ (my_obj.snapmirror_create, [('volume-get-iter', 'success')], 'Source volume does not exist. Please specify a volume that exists'),
+ (my_obj.snapmirror_create, [('volume-get-iter', 'volume_info'), ('snapmirror-create', 'error')], 'Error creating SnapMirror'),
+ (my_obj.snapmirror_delete, [('snapmirror-destroy', 'error')], 'Error deleting SnapMirror:'),
+ (my_obj.snapmirror_get, [('snapmirror-get-iter', 'error')], 'Error fetching snapmirror info:'),
+ (my_obj.snapmirror_initialize, [('snapmirror-get-iter', 'sm_info'), ('snapmirror-initialize', 'error')], 'Error initializing SnapMirror:'),
+ (my_obj.snapmirror_modify, [('snapmirror-modify', 'error')], 'Error modifying SnapMirror schedule or policy:'),
+ (my_obj.snapmirror_quiesce, [('snapmirror-quiesce', 'error')], 'Error quiescing SnapMirror:'),
+ (my_obj.snapmirror_release, [('snapmirror-release', 'error')], 'Error releasing SnapMirror relationship:'),
+ (my_obj.snapmirror_resume, [('snapmirror-resume', 'error')], 'Error resuming SnapMirror relationship:'),
+ (my_obj.snapmirror_restore, [('snapmirror-restore', 'error')], 'Error restoring SnapMirror relationship:'),
+ (my_obj.snapmirror_resync, [('snapmirror-resync', 'error')], 'Error resyncing SnapMirror relationship:'),
+ (my_obj.snapmirror_update, [('snapmirror-update', 'error')], 'Error updating SnapMirror:'),
+ ]
+ for (function, zapis, error) in tests:
+ calls = [('ZAPI', zapi[0], ZRR[zapi[1]]) for zapi in zapis]
+ register_responses(calls)
+ if function in (my_obj.get_svm_peer,):
+ assert error in expect_and_capture_ansible_exception(function, 'fail', 's_svm', 'd_svm')['msg']
+ elif function in (my_obj.snapmirror_update, my_obj.snapmirror_modify):
+ assert error in expect_and_capture_ansible_exception(function, 'fail', {})['msg']
+ else:
+ assert error in expect_and_capture_ansible_exception(function, 'fail')['msg']
+
+
+@patch('time.sleep')
+def test_successful_rest_create(dont_sleep):
+ ''' creating snapmirror and testing idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ ('POST', 'snapmirror/relationships', SRR['success']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_uninitialized']),
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check initialized
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check health
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check health
+ ])
+ module_args = {
+ "use_rest": "always",
+ "schedule": "abc",
+ "identity_preservation": "full"
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['update'] = False
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_rest_create():
+ ''' creating snapmirror with unsupported REST options '''
+ module_args = {
+ "use_rest": "always",
+ "identity_preserve": True,
+ "schedule": "abc",
+ "relationship_type": "data_protection",
+ }
+ msg = "REST API currently does not support 'identity_preserve, relationship_type: data_protection'"
+ assert create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_negative_rest_create_schedule_not_supported():
+ ''' creating snapmirror with unsupported REST options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "schedule": "abc",
+ }
+ msg = "Error: Minimum version of ONTAP for schedule is (9, 11, 1). Current version: (9, 8, 0)."\
+ " - With REST use the policy option to define a schedule."
+ assert create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_negative_rest_create_identity_preservation_not_supported():
+ ''' creating snapmirror with unsupported REST options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "identity_preservation": "full",
+ }
+ msg = "Error: Minimum version of ONTAP for identity_preservation is (9, 11, 1). Current version: (9, 8, 0)."
+ error = create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error == msg
+
+
+def test_negative_rest_get_error():
+ ''' creating snapmirror with API error '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ msg = "Error getting SnapMirror svmdst3:voldst1: calling: snapmirror/relationships: got Expected error."
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_negative_rest_create_error():
+ ''' creating snapmirror with API error '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ ('POST', 'snapmirror/relationships', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ msg = "Error creating SnapMirror: calling: snapmirror/relationships: got Expected error."
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+@patch('time.sleep')
+def test_rest_snapmirror_initialize(dont_sleep):
+ ''' snapmirror initialize testing '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_uninitialized_xfering']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_uninitialized_xfering']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_uninitialized']),
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # Inside SM init patch response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_data_transferring']), # get to check status after initialize
+ ('GET', 'snapmirror/relationships', SRR['sm_get_data_transferring']), # get to check status after initialize
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # get to check status after initialize
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check for update
+ ('POST', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06/transfers', SRR['success']), # update
+ ('GET', 'snapmirror/relationships', SRR['sm_get_uninitialized']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_snapmirror_update():
+ ''' snapmirror update testing '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # first sm_get
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply update calls again sm_get
+ ('POST', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06/transfers', SRR['success']), # sm update
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_sm_break_success_no_data_transfer(dont_sleep):
+ ''' testing snapmirror break when no data is transferring '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply first sm_get with no data transfer
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # SM quiesce response to pause
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # the sm quiesce function calls sm_get again
+ # sm quiesce validates the state by calling sm_get
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ # sm quiesce validates the state by calling sm_get again after the wait
+ ('GET', 'snapmirror/relationships', SRR['sm_get_paused']),
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm break response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_paused']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ "relationship_state": "broken",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_sm_break_success_no_data_transfer_idempotency():
+ ''' testing snapmirror break idempotency when no data is transferring '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_broken']), # apply first sm_get with no data transfer
+ ('GET', 'snapmirror/relationships', SRR['sm_get_broken']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ "relationship_state": "broken",
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_sm_break_fails_if_uninit():
+ ''' testing snapmirror break fails if sm state uninitialized '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # apply first sm_get with state uninitialized
+ ('GET', 'snapmirror/relationships', SRR['sm_get_uninitialized']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "relationship_state": "broken",
+ }
+ msg = "SnapMirror relationship cannot be broken if mirror state is uninitialized"
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_sm_break_fails_if_load_sharing_or_vault():
+ ''' testing snapmirror break fails for load_sharing or vault types '''
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_load_sharing']),
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored_vault']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "relationship_state": "broken",
+ "relationship_type": "load_sharing",
+ "validate_source_path": False
+ }
+ msg = "SnapMirror break is not allowed in a load_sharing or vault relationship"
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+ module_args['relationship_type'] = 'vault'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+@patch('time.sleep')
+def test_rest_snapmirror_quiesce_fail_when_state_not_paused(dont_sleep):
+ ''' testing snapmirror quiesce failure when the relationship does not reach the paused state '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply first sm_get with no data transfer
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # SM quiesce response
+ # SM quiesce validates the state by calling sm_get after the wait
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # first fail
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # second fail
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # third fail
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # fourth fail
+ ])
+ module_args = {
+ "use_rest": "always",
+ "relationship_state": "broken",
+ "validate_source_path": False
+ }
+ msg = "Taking a long time to quiesce SnapMirror relationship, try again later"
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_snapmirror_break_fails_if_data_is_transferring():
+ ''' testing snapmirror break fails when data is transferring '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # apply first sm_get with data transfer
+ ('GET', 'snapmirror/relationships', SRR['sm_get_data_transferring']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "relationship_state": "broken",
+ }
+ msg = "snapmirror data are transferring"
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+@patch('time.sleep')
+def test_rest_resync_when_state_is_broken(dont_sleep):
+ ''' resync when snapmirror state is broken and relationship_state active '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_broken']), # apply first sm_get with state broken_off
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm resync response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check for idle
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_resume_when_state_quiesced():
+ ''' resume when snapmirror state is quiesced and relationship_state active '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_paused']), # apply first sm_get with state quiesced
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm resume response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # sm update calls sm_get
+ ('POST', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06/transfers', SRR['success']), # sm update response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_snapmirror_delete(dont_sleep):
+ ''' snapmirror delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply first sm_get with no data transfer
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm quiesce response
+ # sm quiesce validates the state by calling sm_get
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ # sm quiesce validates the state again after the wait (0 iter)
+ ('GET', 'snapmirror/relationships', SRR['sm_get_paused']),
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm break response
+ ('DELETE', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm delete response
+ ('GET', 'snapmirror/relationships', SRR['zero_records']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ "state": "absent",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_snapmirror_delete_with_error_on_break(dont_sleep):
+ ''' snapmirror delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply first sm_get with no data transfer
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm quiesce response
+ # sm quiesce validates the state by calling sm_get
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ # sm quiesce validates the state again after the wait (0 iter)
+ ('GET', 'snapmirror/relationships', SRR['sm_get_paused']),
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['generic_error']), # sm break response
+ ('DELETE', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm delete response
+ ('GET', 'snapmirror/relationships', SRR['zero_records']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ "state": "absent",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised("Ignored error(s): Error patching SnapMirror: {'state': 'broken_off'}: "
+ "calling: snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06: got Expected error.")
+
+
+@patch('time.sleep')
+def test_rest_snapmirror_delete_with_error_on_break_and_delete(dont_sleep):
+ ''' snapmirror delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply first sm_get with no data transfer
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm quiesce response
+ # sm quiesce validates the state by calling sm_get
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ # sm quiesce validates the state again after the wait (0 iter)
+ ('GET', 'snapmirror/relationships', SRR['sm_get_paused']),
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['generic_error']), # sm break response
+ ('DELETE', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['generic_error']), # sm delete response
+ ])
+ module_args = {
+ "use_rest": "always",
+ "state": "absent",
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print_warnings()
+ assert "Previous error(s): Error patching SnapMirror: {'state': 'broken_off'}" in error
+ assert "Error deleting SnapMirror: calling: snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06: got Expected error" in error
+
+
+@patch('time.sleep')
+def test_rest_snapmirror_delete_calls_abort(dont_sleep):
+ ''' snapmirror delete calls abort when a transfer is in progress '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # apply first sm_get with data transfer
+ ('GET', 'snapmirror/relationships', SRR['sm_get_data_transferring']),
+ # abort
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06/transfers/xfer_uuid', SRR['empty_good']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_abort']), # wait_for_status calls again sm_get
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm quiesce response
+ # sm quiesce validates the state by calling sm_get
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ # sm quiesce validates the state again after the wait (0 iter)
+ ('GET', 'snapmirror/relationships', SRR['sm_get_paused']),
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm break response
+ ('DELETE', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm delete response
+ ('GET', 'snapmirror/relationships', SRR['zero_records']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ "state": "absent",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_snapmirror_modify():
+ ''' snapmirror modify'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply first sm_get
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm modify response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # sm update calls sm_get to check mirror state
+ ('POST', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06/transfers', SRR['success']), # sm update response
+ ('GET', 'snapmirror/relationships', SRR['zero_records']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ "policy": "Asynchronous",
+ "schedule": "abcdef",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_snapmirror_modify_warning():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply first sm_get
+ ('PATCH', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06', SRR['success']), # sm modify response
+ ])
+ module_args = {
+ "use_rest": "always",
+ "policy": "Asynchronous",
+ "schedule": "abcdef",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.snapmirror_mod_init_resync_break_quiesce_resume_rest(modify=module_args) is None
+ print_warnings()
+ assert_warning_was_raised('Unexpected key in modify: use_rest, value: always')
+
+
+def test_rest_snapmirror_restore():
+ ''' snapmirror restore '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # apply first sm_get
+ ('POST', 'snapmirror/relationships', SRR['success']), # first post response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # After first post call to get relationship uuid
+ ('POST', 'snapmirror/relationships/b5ee4571-5429-11ec-9779-005056b39a06/transfers', SRR['success']), # second post response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ "relationship_type": "restore",
+ "source_snapshot": "source_snapshot",
+ "clean_up_failure": False,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_snapmirror_create_and_initialize_not_found():
+ ''' snapmirror create with create_destination when the policy is not found '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']), # apply first sm_get
+ ('GET', 'snapmirror/policies', SRR['zero_records']), # policy not found
+ ])
+ module_args = {
+ "use_rest": "always",
+ "create_destination": {"enabled": True},
+ "policy": "sm_policy"
+ }
+ error = 'Error: cannot find policy sm_policy for vserver svmdst3'
+ assert error == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_error_snapmirror_create_and_initialize_bad_type():
+ ''' snapmirror create with create_destination and an unexpected policy type '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']), # apply first sm_get
+ ('GET', 'snapmirror/policies', SRR['sm_policies']), # policy
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']), # apply first sm_get
+ ('GET', 'snapmirror/policies', SRR['sm_policies']), # policy
+ ])
+ module_args = {
+ "use_rest": "always",
+ "create_destination": {"enabled": True},
+ "policy": "sm_policy",
+ "destination_vserver": "bad_type",
+ "source_vserver": "any"
+ }
+ error = 'Error: unexpected type: svm_invalid for policy sm_policy for vserver bad_type'
+ assert error == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['destination_vserver'] = 'cluster_scope_only'
+ error = 'Error: unexpected type: system_invalid for policy sm_policy for vserver cluster_scope_only'
+ assert error == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_errors():
+ ''' generic REST errors '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # set_initialization_state
+ ('GET', 'snapmirror/policies', SRR['generic_error']),
+ # snapmirror_restore_rest
+ ('POST', 'snapmirror/relationships', SRR['generic_error']),
+ # snapmirror_restore_rest
+ ('POST', 'snapmirror/relationships', SRR['success']),
+ ('POST', 'snapmirror/relationships/1234/transfers', SRR['generic_error']),
+ # snapmirror_mod_init_resync_break_quiesce_resume_rest
+ ('PATCH', 'snapmirror/relationships/1234', SRR['generic_error']),
+ # snapmirror_update_rest
+ ('POST', 'snapmirror/relationships/1234/transfers', SRR['generic_error']),
+ # snapmirror_abort_rest
+ ('PATCH', 'snapmirror/relationships/1234/transfers/5678', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "policy": "policy"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = rest_error_message("Error fetching SnapMirror policy", 'snapmirror/policies')
+ assert error in expect_and_capture_ansible_exception(my_obj.set_initialization_state, 'fail')['msg']
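+ # the remaining calls operate on an existing relationship, so set the UUIDs the REST paths expect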
+ my_obj.parameters['uuid'] = '1234'
+ my_obj.parameters['transfer_uuid'] = '5678'
+ error = rest_error_message("Error restoring SnapMirror", 'snapmirror/relationships')
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_restore_rest, 'fail')['msg']
+ error = rest_error_message("Error restoring SnapMirror Transfer", 'snapmirror/relationships/1234/transfers')
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_restore_rest, 'fail')['msg']
+ my_obj.na_helper.changed = True
+ assert my_obj.snapmirror_mod_init_resync_break_quiesce_resume_rest() is None
+ assert not my_obj.na_helper.changed
+ error = rest_error_message("Error patching SnapMirror: {'state': 'broken_off'}", 'snapmirror/relationships/1234')
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_mod_init_resync_break_quiesce_resume_rest, 'fail', 'broken_off')['msg']
+ error = rest_error_message('Error updating SnapMirror relationship', 'snapmirror/relationships/1234/transfers')
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_update_rest, 'fail')['msg']
+ error = rest_error_message('Error aborting SnapMirror', 'snapmirror/relationships/1234/transfers/5678')
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_abort_rest, 'fail')['msg']
+
+
+def test_rest_error_no_uuid():
+ ''' REST errors when the relationship UUID cannot be found '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ # snapmirror_restore_rest
+ ('POST', 'snapmirror/relationships', SRR['success']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ # snapmirror_mod_init_resync_break_quiesce_resume_rest
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ # snapmirror_update_rest
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ # others, no call
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = 'Error restoring SnapMirror: unable to get UUID for the SnapMirror relationship.'
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_restore_rest, 'fail')['msg']
+ error = 'Error in updating SnapMirror relationship: unable to get UUID for the SnapMirror relationship.'
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_mod_init_resync_break_quiesce_resume_rest, 'fail')['msg']
+ error = 'Error in updating SnapMirror relationship: unable to get UUID for the SnapMirror relationship.'
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_update_rest, 'fail')['msg']
+ error = 'Error in aborting SnapMirror: unable to get either uuid: None or transfer_uuid: None.'
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_abort_rest, 'fail')['msg']
+ error = 'Error in deleting SnapMirror: None, unable to get UUID for the SnapMirror relationship.'
+ assert error in expect_and_capture_ansible_exception(my_obj.snapmirror_delete_rest, 'fail')['msg']
+
+
+@patch('time.sleep')
+def test_rest_snapmirror_create_and_initialize(dont_sleep):
+ ''' snapmirror create and initialize with create_destination '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']), # apply first sm_get
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'storage/volumes', SRR['one_record']),
+ ('GET', 'snapmirror/policies', SRR['sm_policies']), # policy
+ ('POST', 'snapmirror/relationships', SRR['success']), # first post response
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check status
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']), # check_health calls sm_get
+ ])
+ module_args = {
+ "use_rest": "always",
+ "create_destination": {"enabled": True},
+ "policy": "sm_policy",
+ # force a call to check_if_remote_volume_exists
+ "peer_options": {"hostname": "10.10.10.10"},
+ "source_volume": "source_volume",
+ "source_vserver": "source_vserver",
+ "destination_volume": "destination_volume",
+ "destination_vserver": "svmdst3"
+
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_set_new_style():
+ # validate the old options are set properly using new endpoints
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('source_path')
+ args.pop('destination_path')
+ module_args = {
+ "use_rest": "always",
+ "source_endpoint": {
+ "cluster": "source_cluster",
+ "consistency_group_volumes": "source_consistency_group_volumes",
+ "path": "source_path",
+ "svm": "source_svm",
+ },
+ "destination_endpoint": {
+ "cluster": "destination_cluster",
+ "consistency_group_volumes": "destination_consistency_group_volumes",
+ "path": "destination_path",
+ "svm": "destination_svm",
+ },
+ }
+ my_obj = create_module(my_module, args, module_args)
+ assert my_obj.set_new_style() is None
+ assert my_obj.new_style
+ assert my_obj.parameters['destination_vserver'] == 'destination_svm'
+ assert my_obj.set_initialization_state() == 'in_sync'
+
+
+def test_negative_set_new_style():
+ # validate version checks and missing endpoint errors for the new style options
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('source_path')
+ args.pop('destination_path')
+ module_args = {
+ "use_rest": "always",
+ "source_endpoint": {
+ "cluster": "source_cluster",
+ "consistency_group_volumes": "source_consistency_group_volumes",
+ "path": "source_path",
+ "svm": "source_svm",
+ },
+ "destination_endpoint": {
+ "cluster": "destination_cluster",
+ "consistency_group_volumes": "destination_consistency_group_volumes",
+ "path": "destination_path",
+ "svm": "destination_svm",
+ },
+ }
+ # errors on source_endpoint
+ my_obj = create_module(my_module, args, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.set_new_style, 'fail')['msg']
+ assert "Error: using any of ['cluster', 'ipspace'] requires ONTAP 9.7 or later and REST must be enabled" in error
+ assert "ONTAP version: 9.6.0 - using REST" in error
+ my_obj = create_module(my_module, args, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.set_new_style, 'fail')['msg']
+ assert "Error: using consistency_group_volumes requires ONTAP 9.8 or later and REST must be enabled" in error
+ assert "ONTAP version: 9.7.0 - using REST" in error
+ # errors on destination_endpoint
+ module_args['source_endpoint'].pop('cluster')
+ my_obj = create_module(my_module, args, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.set_new_style, 'fail')['msg']
+ assert "Error: using any of ['cluster', 'ipspace'] requires ONTAP 9.7 or later and REST must be enabled" in error
+ assert "ONTAP version: 9.6.0 - using REST" in error
+ module_args['source_endpoint'].pop('consistency_group_volumes')
+ my_obj = create_module(my_module, args, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.set_new_style, 'fail')['msg']
+ assert "Error: using consistency_group_volumes requires ONTAP 9.8 or later and REST must be enabled" in error
+ assert "ONTAP version: 9.7.0 - using REST" in error
+ module_args.pop('source_endpoint')
+ module_args.pop('destination_endpoint')
+ my_obj = create_module(my_module, args, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.set_new_style, 'fail')['msg']
+ assert error == 'Missing parameters: Source endpoint or Destination endpoint'
+
+
+def test_check_parameters_new_style():
+ # validate the old options are set properly using new endpoints
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('source_path')
+ args.pop('destination_path')
+ module_args = {
+ "use_rest": "always",
+ "source_endpoint": {
+ "cluster": "source_cluster",
+ "consistency_group_volumes": "source_consistency_group_volumes",
+ "path": "source_path",
+ "svm": "source_svm",
+ },
+ "destination_endpoint": {
+ "cluster": "destination_cluster",
+ "consistency_group_volumes": "destination_consistency_group_volumes",
+ "path": "destination_path",
+ "svm": "destination_svm",
+ },
+ }
+ my_obj = create_module(my_module, args, module_args)
+ assert my_obj.check_parameters() is None
+ assert my_obj.new_style
+ assert my_obj.parameters['destination_vserver'] == 'destination_svm'
+
+
+def test_negative_check_parameters_new_style():
+ # validate version checks
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('source_path')
+ args.pop('destination_path')
+ module_args = {
+ "use_rest": "always",
+ "source_endpoint": {
+ "cluster": "source_cluster",
+ "consistency_group_volumes": "source_consistency_group_volumes",
+ "path": "source_path",
+ "svm": "source_svm",
+ },
+ "destination_endpoint": {
+ "cluster": "destination_cluster",
+ "consistency_group_volumes": "destination_consistency_group_volumes",
+ "path": "destination_path",
+ "svm": "destination_svm",
+ },
+ "create_destination": {"enabled": True}
+ }
+ # errors on source_endpoint
+ error = 'Minimum version of ONTAP for create_destination is (9, 7).'
+ assert error in create_module(my_module, args, module_args, fail=True)['msg']
+ my_obj = create_module(my_module, args, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.check_parameters, 'fail')['msg']
+ assert "Error: using consistency_group_volumes requires ONTAP 9.8 or later and REST must be enabled" in error
+ assert "ONTAP version: 9.7.0 - using REST" in error
+ module_args['destination_endpoint'].pop('path')
+ error = create_module(my_module, args, module_args, fail=True)['msg']
+ assert "missing required arguments: path found in destination_endpoint" in error
+
+
+def test_check_parameters_old_style():
+ # validate the paths are constructed properly when using the old style options
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ # using paths
+ module_args = {
+ "use_rest": "always",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.check_parameters() is None
+ assert not my_obj.new_style
+ # using volume and vserver, paths are constructed
+ args = dict(DEFAULT_ARGS)
+ args.pop('source_path')
+ args.pop('destination_path')
+ module_args = {
+ "use_rest": "always",
+ "source_volume": "source_vol",
+ "source_vserver": "source_svm",
+ "destination_volume": "dest_vol",
+ "destination_vserver": "dest_svm",
+ }
+ my_obj = create_module(my_module, args, module_args)
+ assert my_obj.check_parameters() is None
+ assert not my_obj.new_style
+ assert my_obj.parameters['source_path'] == "source_svm:source_vol"
+ assert my_obj.parameters['destination_path'] == "dest_svm:dest_vol"
+ # vserver DR
+ module_args = {
+ "use_rest": "always",
+ "source_vserver": "source_svm",
+ "destination_vserver": "dest_svm",
+ }
+ my_obj = create_module(my_module, args, module_args)
+ assert my_obj.check_parameters() is None
+ assert not my_obj.new_style
+ assert my_obj.parameters['source_path'] == "source_svm:"
+ assert my_obj.parameters['destination_path'] == "dest_svm:"
+ body, dummy = my_obj.get_create_body()
+ assert body["source"] == {"path": "source_svm:"}
+ module_args = {
+ "use_rest": "always",
+ "source_volume": "source_vol",
+ "source_vserver": "source_svm",
+ "destination_volume": "dest_vol",
+ "destination_vserver": "dest_svm",
+ }
+ my_obj = create_module(my_module, args, module_args)
+ my_obj.parameters.pop("source_vserver")
+ error = 'Missing parameters: source vserver or destination vserver or both'
+ assert error in expect_and_capture_ansible_exception(my_obj.check_parameters, 'fail')['msg']
+
+
+def test_validate_source_path():
+ # validate source path when vserver local name is different
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peers', SRR['zero_records']),
+ ('GET', 'svm/peers', SRR['svm_peer_info']),
+ ('GET', 'svm/peers', SRR['svm_peer_info']),
+ # error
+ ('GET', 'svm/peers', SRR['generic_error']),
+ # warnings
+ ])
+ # using paths
+ module_args = {
+ "use_rest": "always",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ current = None
+ assert my_obj.validate_source_path(current) is None
+ current = {}
+ assert my_obj.validate_source_path(current) is None
+ current = {'source_path': 'svmsrc3:volsrc1'}
+ assert my_obj.validate_source_path(current) is None
+ current = {'source_path': 'svmsrc3:volsrc1'}
+ assert my_obj.validate_source_path(current) is None
+ current = {'source_path': 'vserver:volume'}
+ error = 'Error: another relationship is present for the same destination with source_path: "vserver:volume" '\
+ '(vserver:volume on cluster cluster). Desired: svmsrc3:volsrc1 on None'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_source_path, 'fail', current)['msg']
+ current = {'source_path': 'vserver:volume1'}
+ my_obj.parameters['connection_type'] = 'other'
+ error = 'Error: another relationship is present for the same destination with source_path: "vserver:volume1".'\
+ ' Desired: svmsrc3:volsrc1 on None'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_source_path, 'fail', current)['msg']
+ my_obj.parameters['connection_type'] = 'ontap_ontap'
+ current = {'source_path': 'vserver:volume'}
+ error = rest_error_message('Error retrieving SVM peer', 'svm/peers')
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_source_path, 'fail', current)['msg']
+ current = {'source_path': 'vserver/volume'}
+ assert my_obj.validate_source_path(current) is None
+ assert_warning_was_raised('Unexpected source path: vserver/volume, skipping validation.')
+ my_obj.parameters['destination_endpoint'] = {'path': 'vserver/volume'}
+ current = {'source_path': 'vserver:volume'}
+ assert my_obj.validate_source_path(current) is None
+ assert_warning_was_raised('Unexpected destination path: vserver/volume, skipping validation.')
+
+
+@patch('time.sleep')
+def test_wait_for_idle_status(dont_sleep):
+ # validate wait time and time-out
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_mirrored']),
+ # time-out
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ ])
+ # using paths
+ module_args = {
+ "use_rest": "always",
+ "transferring_time_out": 0,
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.wait_for_idle_status() is None
+ assert my_obj.wait_for_idle_status() is not None
+ module_args = {
+ "use_rest": "always",
+ "transferring_time_out": 60,
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.wait_for_idle_status() is not None
+ assert my_obj.wait_for_idle_status() is None
+ assert_warning_was_raised('SnapMirror relationship is still transferring after 60 seconds.')
+
+
+def test_dp_to_xdp():
+ # with ZAPI, DP is transformed to XDP to match ONTAP behavior
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored']),
+ ])
+ # using paths
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": 'data_protection',
+ "validate_source_path": False
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_actions() is not None
+ assert my_obj.parameters['relationship_type'] == 'extended_data_protection'
+
+
+def test_cannot_change_rtype():
+ # with ZAPI, can't change relationship_type
+ register_responses([
+ ('ZAPI', 'snapmirror-get-iter', ZRR['sm_info_snapmirrored']),
+ ])
+ # using paths
+ module_args = {
+ "use_rest": "never",
+ "relationship_type": 'load_sharing',
+ "validate_source_path": False
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = 'Error: cannot modify relationship_type from extended_data_protection to load_sharing.'
+ assert error in expect_and_capture_ansible_exception(my_obj.get_actions, 'fail', )['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_check_health():
+ # validate the warning raised when the relationship is not healthy
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'snapmirror/relationships', SRR['zero_records']),
+ ('GET', 'snapmirror/relationships', SRR['sm_get_abort']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.check_health() is None
+ assert_no_warnings()
+ assert my_obj.check_health() is None
+ assert_warning_was_raised('SnapMirror relationship exists but is not healthy. '
+ 'Unhealthy reason: this is why the relationship is not healthy. '
+ 'Last transfer error: this is why the relationship is not healthy.')
+
+
+def test_negative_check_if_remote_volume_exists_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'storage/volumes', SRR['zero_records']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = 'REST is not supported on Source'
+ assert error in expect_and_capture_ansible_exception(my_obj.check_if_remote_volume_exists_rest, 'fail')['msg']
+ my_obj.src_use_rest = True
+ assert not my_obj.check_if_remote_volume_exists_rest()
+ my_obj.parameters['peer_options'] = {}
+ netapp_utils.setup_host_options_from_module_params(my_obj.parameters['peer_options'], my_obj.module, netapp_utils.na_ontap_host_argument_spec_peer().keys())
+ my_obj.parameters['source_volume'] = 'volume'
+ my_obj.parameters['source_vserver'] = 'vserver'
+ assert my_obj.set_source_cluster_connection() is None
+ assert not my_obj.check_if_remote_volume_exists_rest()
+ error = rest_error_message('Error fetching source volume', 'storage/volumes')
+ assert error in expect_and_capture_ansible_exception(my_obj.check_if_remote_volume_exists_rest, 'fail')['msg']
+
+
+def test_snapmirror_release_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.snapmirror_release() is None
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_negative_set_source_cluster_connection(mock_netapp_lib):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "source_volume": "source_volume",
+ "source_vserver": "source_vserver",
+ "destination_volume": "destination_volume",
+ "destination_vserver": "destination_vserver",
+ "relationship_type": "vault",
+ "peer_options": {
+ "use_rest": "always",
+ "hostname": "source_host",
+ }
+ }
+ mock_netapp_lib.side_effect = [True, False]
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "REST API currently does not support 'relationship_type: vault'"
+ assert error in expect_and_capture_ansible_exception(my_obj.set_source_cluster_connection, 'fail')['msg']
+ my_obj.parameters['peer_options']['use_rest'] = 'auto'
+ error = "Error: the python NetApp-Lib module is required. Import error: None"
+ assert error in expect_and_capture_ansible_exception(my_obj.set_source_cluster_connection, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapmirror_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapmirror_policy.py
new file mode 100644
index 000000000..23a1e9c64
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapmirror_policy.py
@@ -0,0 +1,1269 @@
+# (c) 2019-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_snapmirror_policy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_module, patch_ansible, expect_and_capture_ansible_exception, create_and_apply
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapmirror_policy import NetAppOntapSnapMirrorPolicy as my_module, main as my_main
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'success': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_snapmirror_policy_async': (200, {
+ 'svm': {'name': 'ansible'},
+ 'name': 'ansible',
+ 'uuid': 'abcdef12-3456-7890-abcd-ef1234567890',
+ 'comment': 'created by ansible',
+ 'type': 'async',
+ 'snapmirror_label': [],
+ 'keep': [],
+ 'schedule': [],
+ 'prefix': [],
+ 'network_compression_enabled': True,
+ 'identity_preservation': 'exclude_network_config'
+ }, None),
+ 'get_snapmirror_policy_async_with_options': (200, {
+ 'svm': {'name': 'ansible'},
+ 'name': 'ansible',
+ 'uuid': 'abcdef12-3456-7890-abcd-ef1234567890',
+ 'comment': 'created by ansible',
+ 'type': 'async',
+ 'snapmirror_label': [],
+ 'keep': [],
+ 'schedule': [],
+ 'prefix': [],
+ 'copy_latest_source_snapshot': True,
+ 'network_compression_enabled': True,
+ 'identity_preservation': 'exclude_network_config'
+ }, None),
+ 'get_snapmirror_policy_sync': (200, {
+ 'svm': {'name': 'ansible'},
+ 'name': 'ansible',
+ 'uuid': 'abcdef12-3456-7890-abcd-ef1234567890',
+ 'comment': 'created by ansible',
+ 'type': 'sync',
+ 'snapmirror_label': [],
+ 'keep': [],
+ 'schedule': [],
+ 'prefix': [],
+ 'network_compression_enabled': False
+ }, None),
+ 'get_snapmirror_policy_async_with_rules': (200, {
+ 'svm': {'name': 'ansible'},
+ 'name': 'ansible',
+ 'uuid': 'abcdef12-3456-7890-abcd-ef1234567890',
+ 'comment': 'created by ansible',
+ 'type': 'async',
+ 'retention': [
+ {
+ 'label': 'daily',
+ 'count': 7,
+ 'creation_schedule': {'name': ''},
+ 'prefix': '',
+ },
+ {
+ 'label': 'weekly',
+ 'count': 5,
+ 'creation_schedule': {'name': 'weekly'},
+ 'prefix': 'weekly',
+ },
+ {
+ 'label': 'monthly',
+ 'count': 12,
+ 'creation_schedule': {'name': 'monthly'},
+ 'prefix': 'monthly',
+ },
+ ],
+ 'network_compression_enabled': False
+ }, None),
+ 'get_snapmirror_policy_async_with_rules_dash': (200, {
+ 'svm': {'name': 'ansible'},
+ 'name': 'ansible',
+ 'uuid': 'abcdef12-3456-7890-abcd-ef1234567890',
+ 'comment': 'created by ansible',
+ 'type': 'async',
+ 'retention': [
+ {
+ 'label': 'daily',
+ 'count': 7,
+ 'creation_schedule': {'name': ''},
+ 'prefix': '',
+ },
+ {
+ 'label': 'weekly',
+ 'count': 5,
+ 'creation_schedule': {'name': 'weekly'},
+ 'prefix': 'weekly',
+ },
+ {
+ 'label': 'monthly',
+ 'count': 12,
+ 'creation_schedule': {'name': '-'},
+ 'prefix': '-',
+ },
+ ],
+ 'network_compression_enabled': False
+ }, None),
+ 'get_snapmirror_policy_async_with_create_snapshot_on_source': (200, {
+ 'svm': {'name': 'ansible'},
+ 'name': 'ansible',
+ 'uuid': 'abcdef12-3456-7890-abcd-ef1234567890',
+ 'comment': 'created by ansible',
+ 'type': 'async',
+ 'retention': [
+ {
+ 'label': 'daily',
+ 'count': 7,
+ 'creation_schedule': {'name': ''},
+ 'prefix': '',
+ },
+ ],
+ 'create_snapshot_on_source': False,
+ 'is_network_compression_enabled': True,
+ 'transfer_schedule': {'name': 'yearly'},
+ }, None),
+ 'get_snapmirror_policy_sync_with_sync_type': (200, {
+ 'svm': {'name': 'ansible'},
+ 'name': 'ansible',
+ 'uuid': 'abcdef12-3456-7890-abcd-ef1234567890',
+ 'comment': 'created by ansible',
+ 'type': 'sync',
+ 'sync_type': 'automated_failover',
+ # does not make sense, but does not hurt
+ 'copy_all_source_snapshots': False
+ }, None),
+})
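+
+# Each entry above is a canned (status_code, json_body, error) tuple; a test picks entries by
+# key when declaring the sequence of REST calls it expects the module to make.  Minimal usage
+# sketch, reusing only names defined in this file:
+#
+#   register_responses([
+#       ('GET', 'cluster', SRR['is_rest']),
+#       ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async']),
+#   ])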
+
+
+snapmirror_policy_info = {
+ 'comment': 'created by ansible',
+ 'policy-name': 'ansible',
+ 'type': 'async_mirror',
+ 'tries': '8',
+ 'transfer-priority': 'normal',
+ 'restart': 'always',
+ 'is-network-compression-enabled': 'false',
+ 'ignore-atime': 'false',
+ 'vserver-name': 'ansible',
+ 'common-snapshot-schedule': 'monthly'
+}
+
+snapmirror_policy_rules = {
+ 'snapmirror-policy-rules': [
+ {'info': {
+ 'snapmirror-label': 'daily',
+ 'keep': 7,
+ 'schedule': '',
+ 'prefix': '',
+ }},
+ {'info': {
+ 'snapmirror-label': 'weekly',
+ 'keep': 5,
+ 'schedule': 'weekly',
+ 'prefix': 'weekly',
+ }},
+ {'info': {
+ 'snapmirror-label': 'monthly',
+ 'keep': 12,
+ 'schedule': 'monthly',
+ 'prefix': 'monthly',
+ }},
+ {'info': {
+ 'snapmirror-label': 'sm_created',
+ 'keep': 12,
+ 'schedule': 'monthly',
+ 'prefix': 'monthly',
+ }},
+ ]
+}
+
+
+def get_snapmirror_policy_info(with_rules=False):
+ info = dict(snapmirror_policy_info)
+ if with_rules:
+ info.update(snapmirror_policy_rules)
+ return {'attributes-list': {'snapmirror-policy-info': info}}
+
+
+ZRR = zapi_responses({
+ 'snapmirror-policy-info': build_zapi_response(get_snapmirror_policy_info()),
+ 'snapmirror-policy-info-with-rules': build_zapi_response(get_snapmirror_policy_info(True)),
+ 'error_13001': build_zapi_error(13001, 'policy not found'),
+})
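+
+# build_zapi_response wraps a plain attributes dict into a canned ZAPI reply, while
+# build_zapi_error(13001, 'policy not found') builds the error code that the create test below
+# relies on being treated as "policy does not exist" (see test_successful_create_with_rules).
+# Sketch of adding another canned error in the same style (ZRR_EXTRA and code 15661 are
+# illustrative only, not used elsewhere in this file):
+#
+#   ZRR_EXTRA = zapi_responses({'error_15661': build_zapi_error(15661, 'object not found')})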
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'use_rest',
+ 'policy_name': 'ansible',
+ 'vserver': 'ansible',
+}
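+
+
+# The tests below all follow the same pattern: DEFAULT_ARGS supplies the connection details and
+# policy identifiers, and each test layers module_args on top of it; module_args wins for
+# overlapping keys such as use_rest, as the mixed ZAPI/REST tests below rely on.  A minimal,
+# illustrative sketch of that merge, using only names defined in this file:
+def test_parameters_merge_sketch():
+    ''' illustrative only: module_args is applied on top of DEFAULT_ARGS '''
+    register_responses([
+    ])
+    module_args = {
+        'use_rest': 'never',
+        'comment': 'sketch',
+    }
+    my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+    assert my_obj.parameters['policy_name'] == 'ansible'
+    assert my_obj.parameters['comment'] == 'sketch'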
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('policy_name')
+ error = 'missing required arguments: policy_name'
+ assert error in call_main(my_main, args, fail=True)['msg']
+
+
+def test_ensure_get_called():
+ ''' test get_snapmirror_policy for non-existent snapmirror policy'''
+ register_responses([
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_snapmirror_policy() is None
+
+
+def test_ensure_get_called_existing():
+ ''' test get_snapmirror_policy for existing snapmirror policy'''
+ register_responses([
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['snapmirror-policy-info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_snapmirror_policy()
+
+
+def test_successful_create():
+ ''' creating snapmirror policy without rules and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['no_records']),
+ ('ZAPI', 'snapmirror-policy-create', ZRR['success']),
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['snapmirror-policy-info']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'transfer_priority': 'normal'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_create_with_rest():
+ ''' creating snapmirror policy without rules via REST and testing idempotency '''
+ register_responses([
+ # default is async
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('POST', 'snapmirror/policies', SRR['success']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async']),
+ # explicitly async
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('POST', 'snapmirror/policies', SRR['success']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async_with_options']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async_with_options']),
+ # sync
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('POST', 'snapmirror/policies', SRR['success']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_sync']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_sync']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['policy_type'] = 'async_mirror'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['policy_type'] = 'sync_mirror'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
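+
+# Note on the registered sequence above: each call_main() run starts with its own
+# ('GET', 'cluster') and ('GET', 'svm/svms') pair, because every run re-detects REST support
+# and re-resolves the vserver scope before looking at snapmirror/policies.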
+
+
+def test_successful_create_with_rules():
+ ''' creating snapmirror policy with rules and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['error_13001']),
+ ('ZAPI', 'snapmirror-policy-create', ZRR['success']),
+ ('ZAPI', 'snapmirror-policy-add-rule', ZRR['success']),
+ ('ZAPI', 'snapmirror-policy-add-rule', ZRR['success']),
+ ('ZAPI', 'snapmirror-policy-add-rule', ZRR['success']),
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['snapmirror-policy-info-with-rules']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [7, 5, 12],
+ 'schedule': ['', 'weekly', 'monthly'],
+ 'prefix': ['', 'weekly', 'monthly']
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_create_with_rules_via_rest():
+ ''' creating snapmirror policy with rules via rest and testing idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('POST', 'snapmirror/policies', SRR['success']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async']),
+ ('PATCH', 'snapmirror/policies/abcdef12-3456-7890-abcd-ef1234567890', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async_with_rules']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [7, 5, 12],
+ 'schedule': ['', 'weekly', 'monthly'],
+ 'prefix': ['', 'weekly', 'monthly']
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_delete():
+ ''' deleting snapmirror policy and testing idempotency '''
+ register_responses([
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['snapmirror-policy-info']),
+ ('ZAPI', 'snapmirror-policy-delete', ZRR['success']),
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'state': 'absent'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_delete_with_rest():
+ ''' deleting snapmirror policy via REST and testing idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async_with_rules_dash']),
+ ('DELETE', 'snapmirror/policies/abcdef12-3456-7890-abcd-ef1234567890', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async_with_rules']),
+ ('DELETE', 'snapmirror/policies/abcdef12-3456-7890-abcd-ef1234567890', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'use_rest': 'always',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify():
+ ''' modifying snapmirror policy without rules. idempotency was tested in create '''
+ register_responses([
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['snapmirror-policy-info']),
+ ('ZAPI', 'snapmirror-policy-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'comment': 'old comment',
+ 'ignore_atime': True,
+ 'is_network_compression_enabled': True,
+ 'owner': 'cluster_admin',
+ 'restart': 'default',
+ 'tries': '7'}
+
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify_with_rest():
+ ''' modifying snapmirror policy without rules via REST. Idempotency was tested in create '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async']),
+ ('PATCH', 'snapmirror/policies/abcdef12-3456-7890-abcd-ef1234567890', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'comment': 'old comment',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify_with_rules():
+ ''' modifying snapmirror policy with rules. Idempotency was tested in create '''
+ register_responses([
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['snapmirror-policy-info']),
+ ('ZAPI', 'snapmirror-policy-add-rule', ZRR['success']),
+ ('ZAPI', 'snapmirror-policy-add-rule', ZRR['success']),
+ ('ZAPI', 'snapmirror-policy-add-rule', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [7, 5, 12],
+ 'schedule': ['', 'weekly', 'monthly'],
+ 'prefix': ['', 'weekly', 'monthly']
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify_with_rules_via_rest():
+ ''' modifying snapmirror policy with rules via rest. Idempotency was tested in create '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async']),
+ ('PATCH', 'snapmirror/policies/abcdef12-3456-7890-abcd-ef1234567890', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [7, 5, 12],
+ 'schedule': ['', 'weekly', 'monthly'],
+ 'prefix': ['', 'weekly', 'monthly']
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ZAPI', 'snapmirror-policy-get-iter', ZRR['error']),
+ ('ZAPI', 'snapmirror-policy-create', ZRR['error']),
+ ('ZAPI', 'snapmirror-policy-delete', ZRR['error']),
+ ('ZAPI', 'snapmirror-policy-modify', ZRR['error']),
+ ('ZAPI', 'snapmirror-policy-remove-rule', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'common_snapshot_schedule': 'sched',
+ 'policy_type': 'sync_mirror',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = zapi_error_message('Error getting snapmirror policy ansible')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_snapmirror_policy, 'fail')['msg']
+ error = zapi_error_message('Error creating snapmirror policy ansible')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_snapmirror_policy, 'fail')['msg']
+ error = zapi_error_message('Error deleting snapmirror policy ansible')
+ assert error in expect_and_capture_ansible_exception(my_obj.delete_snapmirror_policy, 'fail')['msg']
+ error = zapi_error_message('Error modifying snapmirror policy ansible')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_snapmirror_policy, 'fail')['msg']
+ module_args = {
+ 'use_rest': 'never',
+ 'common_snapshot_schedule': 'sched',
+ 'policy_type': 'sync_mirror',
+ 'snapmirror_label': ['lbl1'],
+ 'keep': [24],
+ }
+ current = {
+ 'snapmirror_label': ['lbl2'],
+ 'keep': [24],
+ 'prefix': [''],
+ 'schedule': ['weekly'],
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = zapi_error_message('Error modifying snapmirror policy rule ansible')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_snapmirror_policy_rules, 'fail', current)['msg']
+
+
+def test_if_all_methods_catch_exception_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['generic_error']),
+ ('POST', 'snapmirror/policies', SRR['generic_error']),
+ ('DELETE', 'snapmirror/policies/uuid', SRR['generic_error']),
+ ('PATCH', 'snapmirror/policies/uuid', SRR['generic_error']),
+ # modifying rules
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('PATCH', 'snapmirror/policies/uuid', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'sync_mirror',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = rest_error_message('Error getting snapmirror policy', 'snapmirror/policies')
+ assert error in expect_and_capture_ansible_exception(my_obj.get_snapmirror_policy_rest, 'fail')['msg']
+ error = rest_error_message('Error creating snapmirror policy', 'snapmirror/policies')
+ assert error in expect_and_capture_ansible_exception(my_obj.create_snapmirror_policy, 'fail')['msg']
+ error = rest_error_message('Error deleting snapmirror policy', 'snapmirror/policies/uuid')
+ assert error in expect_and_capture_ansible_exception(my_obj.delete_snapmirror_policy, 'fail', 'uuid')['msg']
+ error = rest_error_message('Error modifying snapmirror policy', 'snapmirror/policies/uuid')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_snapmirror_policy, 'fail', 'uuid', {'key': 'value'})['msg']
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'sync_mirror',
+ 'snapmirror_label': ['lbl1'],
+ 'keep': [24],
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = rest_error_message('Error modifying snapmirror policy rules', 'snapmirror/policies/uuid')
+ assert error in expect_and_capture_ansible_exception(my_obj.modify_snapmirror_policy_rules, 'fail', None, 'uuid')['msg']
+
+
+def test_create_snapmirror_policy_retention_obj_for_rest():
+ ''' test create_snapmirror_policy_retention_obj_for_rest '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ # Test no rules
+ assert my_obj.create_snapmirror_policy_retention_obj_for_rest() == []
+
+ # Test one rule
+ rules = [{'snapmirror_label': 'daily', 'keep': 7}]
+ retention_obj = [{'label': 'daily', 'count': '7'}]
+ assert my_obj.create_snapmirror_policy_retention_obj_for_rest(rules) == retention_obj
+
+ # Test two rules, with a prefix
+ rules = [{'snapmirror_label': 'daily', 'keep': 7},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly'}]
+ retention_obj = [{'label': 'daily', 'count': '7'},
+ {'label': 'weekly', 'count': '5', 'prefix': 'weekly'}]
+ assert my_obj.create_snapmirror_policy_retention_obj_for_rest(rules) == retention_obj
+
+ # Test three rules, with a prefix & schedule
+ rules = [{'snapmirror_label': 'daily', 'keep': 7},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly_sv'},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly_sv', 'schedule': 'monthly'}]
+ retention_obj = [{'label': 'daily', 'count': '7'},
+ {'label': 'weekly', 'count': '5', 'prefix': 'weekly_sv'},
+ {'label': 'monthly', 'count': '12', 'prefix': 'monthly_sv', 'creation_schedule': {'name': 'monthly'}}]
+ assert my_obj.create_snapmirror_policy_retention_obj_for_rest(rules) == retention_obj
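+
+# Worth noting in the expected values above: integer 'keep' counts are rendered as string
+# 'count' values, and a rule's 'schedule' is mapped to a nested 'creation_schedule' name in
+# the REST retention object.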
+
+
+def test_identify_snapmirror_policy_rules_with_schedule():
+ ''' test identify_snapmirror_policy_rules_with_schedule '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ # Test no rules
+ assert my_obj.identify_snapmirror_policy_rules_with_schedule() == ([], [])
+
+ # Test one non-schedule rule identified
+ rules = [{'snapmirror_label': 'daily', 'keep': 7}]
+ schedule_rules = []
+ non_schedule_rules = [{'snapmirror_label': 'daily', 'keep': 7}]
+ assert my_obj.identify_snapmirror_policy_rules_with_schedule(rules) == (schedule_rules, non_schedule_rules)
+
+ # Test one schedule and two non-schedule rules identified
+ rules = [{'snapmirror_label': 'daily', 'keep': 7},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly_sv'},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly_sv', 'schedule': 'monthly'}]
+ schedule_rules = [{'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly_sv', 'schedule': 'monthly'}]
+ non_schedule_rules = [{'snapmirror_label': 'daily', 'keep': 7},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly_sv'}]
+ assert my_obj.identify_snapmirror_policy_rules_with_schedule(rules) == (schedule_rules, non_schedule_rules)
+
+ # Test three schedule & zero non-schedule rules identified
+ rules = [{'snapmirror_label': 'daily', 'keep': 7, 'schedule': 'daily'},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly_sv', 'schedule': 'weekly'},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly_sv', 'schedule': 'monthly'}]
+ schedule_rules = [{'snapmirror_label': 'daily', 'keep': 7, 'schedule': 'daily'},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly_sv', 'schedule': 'weekly'},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly_sv', 'schedule': 'monthly'}]
+ non_schedule_rules = []
+ assert my_obj.identify_snapmirror_policy_rules_with_schedule(rules) == (schedule_rules, non_schedule_rules)
+
+
+def test_identify_new_snapmirror_policy_rules():
+ ''' test identify_new_snapmirror_policy_rules '''
+ register_responses([
+ ])
+
+ # Test with no rules in parameters. new_rules should always be [].
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ current = None
+ new_rules = []
+ assert my_obj.identify_new_snapmirror_policy_rules(current) == new_rules
+
+ current = {'snapmirror_label': ['daily'], 'keep': [7], 'prefix': [''], 'schedule': ['']}
+ new_rules = []
+ assert my_obj.identify_new_snapmirror_policy_rules(current) == new_rules
+
+ # Test with rules in parameters.
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [7, 5, 12],
+ 'schedule': ['', 'weekly', 'monthly'],
+ 'prefix': ['', 'weekly', 'monthly']
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ # Test three new rules identified when no rules currently exist
+ current = None
+ new_rules = [{'snapmirror_label': 'daily', 'keep': 7, 'prefix': '', 'schedule': ''},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly', 'schedule': 'weekly'},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly', 'schedule': 'monthly'}]
+ assert my_obj.identify_new_snapmirror_policy_rules(current) == new_rules
+
+ # Test two new rules identified and one rule already exists
+ current = {'snapmirror_label': ['daily'], 'keep': [7], 'prefix': [''], 'schedule': ['']}
+ new_rules = [{'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly', 'schedule': 'weekly'},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly', 'schedule': 'monthly'}]
+ assert my_obj.identify_new_snapmirror_policy_rules(current) == new_rules
+
+ # Test one new rule identified and two rules already exist
+ current = {'snapmirror_label': ['daily', 'monthly'],
+ 'keep': [7, 12],
+ 'prefix': ['', 'monthly'],
+ 'schedule': ['', 'monthly']}
+ new_rules = [{'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly', 'schedule': 'weekly'}]
+ assert my_obj.identify_new_snapmirror_policy_rules(current) == new_rules
+
+ # Test no new rules identified as all rules already exist
+ current = {'snapmirror_label': ['daily', 'monthly', 'weekly'],
+ 'keep': [7, 12, 5],
+ 'prefix': ['', 'monthly', 'weekly'],
+ 'schedule': ['', 'monthly', 'weekly']}
+ new_rules = []
+ assert my_obj.identify_new_snapmirror_policy_rules(current) == new_rules
+
+
+def test_identify_obsolete_snapmirror_policy_rules():
+ ''' test identify_obsolete_snapmirror_policy_rules '''
+ register_responses([
+ ])
+
+ # Test with no rules in parameters. obsolete_rules should always be [].
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ current = None
+ obsolete_rules = []
+ assert my_obj.identify_obsolete_snapmirror_policy_rules(current) == obsolete_rules
+
+ current = {'snapmirror_label': ['daily'], 'keep': [7], 'prefix': [''], 'schedule': ['']}
+ obsolete_rules = []
+ assert my_obj.identify_obsolete_snapmirror_policy_rules(current) == obsolete_rules
+
+ # Test removing all rules. obsolete_rules should equal current.
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': []
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ current = {'snapmirror_label': ['monthly', 'weekly', 'hourly', 'daily', 'yearly'],
+ 'keep': [12, 5, 24, 7, 7],
+ 'prefix': ['monthly', 'weekly', '', '', 'yearly'],
+ 'schedule': ['monthly', 'weekly', '', '', 'yearly']}
+ obsolete_rules = [{'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly', 'schedule': 'monthly'},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly', 'schedule': 'weekly'},
+ {'snapmirror_label': 'hourly', 'keep': 24, 'prefix': '', 'schedule': ''},
+ {'snapmirror_label': 'daily', 'keep': 7, 'prefix': '', 'schedule': ''},
+ {'snapmirror_label': 'yearly', 'keep': 7, 'prefix': 'yearly', 'schedule': 'yearly'}]
+ assert my_obj.identify_obsolete_snapmirror_policy_rules(current) == obsolete_rules
+
+ # Test with rules in parameters.
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [7, 5, 12],
+ 'schedule': ['', 'weekly', 'monthly'],
+ 'prefix': ['', 'weekly', 'monthly']
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ # Test no rules exist, thus no obsolete rules
+ current = None
+ obsolete_rules = []
+ assert my_obj.identify_obsolete_snapmirror_policy_rules(current) == obsolete_rules
+
+ # Test new rules and one obsolete rule identified
+ current = {'snapmirror_label': ['hourly'], 'keep': [24], 'prefix': [''], 'schedule': ['']}
+ obsolete_rules = [{'snapmirror_label': 'hourly', 'keep': 24, 'prefix': '', 'schedule': ''}]
+ assert my_obj.identify_obsolete_snapmirror_policy_rules(current) == obsolete_rules
+
+ # Test new rules, with one retained and one obsolete rule identified
+ current = {'snapmirror_label': ['hourly', 'daily'],
+ 'keep': [24, 7],
+ 'prefix': ['', ''],
+ 'schedule': ['', '']}
+ obsolete_rules = [{'snapmirror_label': 'hourly', 'keep': 24, 'prefix': '', 'schedule': ''}]
+ assert my_obj.identify_obsolete_snapmirror_policy_rules(current) == obsolete_rules
+
+ # Test new rules and two obsolete rules identified
+ current = {'snapmirror_label': ['monthly', 'weekly', 'hourly', 'daily', 'yearly'],
+ 'keep': [12, 5, 24, 7, 7],
+ 'prefix': ['monthly', 'weekly', '', '', 'yearly'],
+ 'schedule': ['monthly', 'weekly', '', '', 'yearly']}
+ obsolete_rules = [{'snapmirror_label': 'hourly', 'keep': 24, 'prefix': '', 'schedule': ''},
+ {'snapmirror_label': 'yearly', 'keep': 7, 'prefix': 'yearly', 'schedule': 'yearly'}]
+ assert my_obj.identify_obsolete_snapmirror_policy_rules(current) == obsolete_rules
+
+
+def test_identify_modified_snapmirror_policy_rules():
+ ''' test identify_modified_snapmirror_policy_rules '''
+    register_responses([
+    ])
+
+ # Test with no rules in parameters. modified_rules & unmodified_rules should always be [].
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ current = None
+ modified_rules, unmodified_rules = [], []
+    assert my_obj.identify_modified_snapmirror_policy_rules(current) == (modified_rules, unmodified_rules)
+
+ current = {'snapmirror_label': ['daily'], 'keep': [14], 'prefix': ['daily'], 'schedule': ['daily']}
+ modified_rules, unmodified_rules = [], []
+    assert my_obj.identify_modified_snapmirror_policy_rules(current) == (modified_rules, unmodified_rules)
+
+ # Test removing all rules. modified_rules & unmodified_rules should be [].
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': []
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ current = {'snapmirror_label': ['monthly', 'weekly', 'hourly', 'daily', 'yearly'],
+ 'keep': [12, 5, 24, 7, 7],
+ 'prefix': ['monthly', 'weekly', '', '', 'yearly'],
+ 'schedule': ['monthly', 'weekly', '', '', 'yearly']}
+ modified_rules, unmodified_rules = [], []
+    assert my_obj.identify_modified_snapmirror_policy_rules(current) == (modified_rules, unmodified_rules)
+
+ # Test with rules in parameters.
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [7, 5, 12],
+ 'schedule': ['', 'weekly', 'monthly'],
+ 'prefix': ['', 'weekly', 'monthly']
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+
+ # Test no rules exist, thus no modified & unmodified rules
+ current = None
+ modified_rules, unmodified_rules = [], []
+    assert my_obj.identify_modified_snapmirror_policy_rules(current) == (modified_rules, unmodified_rules)
+
+ # Test new rules don't exist, thus no modified & unmodified rules
+ current = {'snapmirror_label': ['hourly'], 'keep': [24], 'prefix': [''], 'schedule': ['']}
+ modified_rules, unmodified_rules = [], []
+    assert my_obj.identify_modified_snapmirror_policy_rules(current) == (modified_rules, unmodified_rules)
+
+ # Test daily & monthly modified, weekly unmodified
+ current = {'snapmirror_label': ['hourly', 'daily', 'weekly', 'monthly'],
+ 'keep': [24, 14, 5, 6],
+ 'prefix': ['', 'daily', 'weekly', 'monthly'],
+ 'schedule': ['', 'daily', 'weekly', 'monthly']}
+ modified_rules = [{'snapmirror_label': 'daily', 'keep': 7, 'prefix': '', 'schedule': ''},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly', 'schedule': 'monthly'}]
+ unmodified_rules = [{'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly', 'schedule': 'weekly'}]
+    assert my_obj.identify_modified_snapmirror_policy_rules(current) == (modified_rules, unmodified_rules)
+
+ # Test all rules modified
+ current = {'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [14, 10, 6],
+ 'prefix': ['', '', ''],
+ 'schedule': ['daily', 'weekly', 'monthly']}
+ modified_rules = [{'snapmirror_label': 'daily', 'keep': 7, 'prefix': '', 'schedule': ''},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly', 'schedule': 'weekly'},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly', 'schedule': 'monthly'}]
+ unmodified_rules = []
+    assert my_obj.identify_modified_snapmirror_policy_rules(current) == (modified_rules, unmodified_rules)
+
+ # Test all rules unmodified
+ current = {'snapmirror_label': ['daily', 'weekly', 'monthly'],
+ 'keep': [7, 5, 12],
+ 'prefix': ['', 'weekly', 'monthly'],
+ 'schedule': ['', 'weekly', 'monthly']}
+ modified_rules = []
+ unmodified_rules = [{'snapmirror_label': 'daily', 'keep': 7, 'prefix': '', 'schedule': ''},
+ {'snapmirror_label': 'weekly', 'keep': 5, 'prefix': 'weekly', 'schedule': 'weekly'},
+ {'snapmirror_label': 'monthly', 'keep': 12, 'prefix': 'monthly', 'schedule': 'monthly'}]
+    assert my_obj.identify_modified_snapmirror_policy_rules(current) == (modified_rules, unmodified_rules)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.HAS_NETAPP_LIB', False)
+def test_module_fail_when_netapp_lib_missing():
+ ''' required lib missing '''
+ module_args = {
+ 'use_rest': 'never',
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_validate_parameters():
+    ''' test validate_parameters '''
+ register_responses([
+ ])
+
+ args = dict(DEFAULT_ARGS)
+ args.pop('vserver')
+ module_args = {
+ 'use_rest': 'never',
+ }
+ error = 'Error: vserver is a required parameter when using ZAPI.'
+ assert error in create_module(my_module, args, module_args, fail=True)['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': list(range(11)),
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = 'Error: A SnapMirror Policy can have up to a maximum of'
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_parameters, 'fail')['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': list(range(10)),
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "Error: Missing 'keep' parameter. When specifying the 'snapmirror_label' parameter, the 'keep' parameter must also be supplied"
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_parameters, 'fail')['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': list(range(10)),
+ 'keep': list(range(9)),
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "Error: Each 'snapmirror_label' value must have an accompanying 'keep' value"
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_parameters, 'fail')['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': list(range(10)),
+ 'keep': list(range(11)),
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "Error: Each 'keep' value must have an accompanying 'snapmirror_label' value"
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_parameters, 'fail')['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'keep': list(range(11)),
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "Error: Missing 'snapmirror_label' parameter. When specifying the 'keep' parameter, the 'snapmirror_label' parameter must also be supplied"
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_parameters, 'fail')['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'snapmirror_label': list(range(10)),
+ 'keep': list(range(10)),
+ 'prefix': list(range(10)),
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "Error: Missing 'schedule' parameter. When specifying the 'prefix' parameter, the 'schedule' parameter must also be supplied"
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_parameters, 'fail')['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'identity_preservation': 'full',
+ }
+ error = 'Error: identity_preservation option is not supported with ZAPI. It can only be used with REST.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'copy_all_source_snapshots': True,
+ }
+ error = 'Error: copy_all_source_snapshots option is not supported with ZAPI. It can only be used with REST.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_validate_parameters_rest():
+    ''' test validate_parameters with REST '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('POST', 'snapmirror/policies', SRR['success']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async']),
+ # copy_all_source_snapshots
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ # copy_latest_source_snapshot
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ # create_snapshot_on_source
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ])
+
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'sync_mirror',
+ 'is_network_compression_enabled': True
+ }
+ error = 'Error: input parameter network_compression_enabled is not valid for SnapMirror policy type sync'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'sync_mirror',
+ 'identity_preservation': 'full'
+ }
+ error = 'Error: identity_preservation is only supported with async (async) policy_type, got: sync'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'async_mirror',
+ 'is_network_compression_enabled': True,
+ 'identity_preservation': 'full'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'async_mirror',
+ 'copy_all_source_snapshots': False,
+ }
+ error = 'Error: the property copy_all_source_snapshots can only be set to true when present'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'async_mirror',
+ 'copy_latest_source_snapshot': False,
+ }
+ error = 'Error: the property copy_latest_source_snapshot can only be set to true when present'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'vault',
+ 'create_snapshot_on_source': True,
+ }
+ error = 'Error: the property create_snapshot_on_source can only be set to false when present'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_errors_in_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('POST', 'snapmirror/policies', SRR['success']),
+ ('GET', 'snapmirror/policies', SRR['zero_records']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_sync']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ error = 'Error: policy ansible not present after create.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+ # change in policy type
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'async_mirror',
+ }
+ error = 'Error: The policy property policy_type cannot be modified from sync to async'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'sync_mirror',
+ }
+ error = 'Error: The policy property policy_type cannot be modified from async to sync'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_errors_in_create_with_copy_snapshots():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'copy_all_source_snapshots': True,
+ 'policy_type': 'sync_mirror'
+ }
+ msg = 'Error: option copy_all_source_snapshots is not supported with policy type sync_mirror.'
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg in error
+
+
+def test_errors_in_create_with_copy_latest_snapshots():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'copy_latest_source_snapshot': True,
+ 'policy_type': 'async',
+ 'snapmirror_label': ["daily", "weekly"],
+ }
+ msg = 'Error: Retention properties cannot be specified along with copy_all_source_snapshots or copy_latest_source_snapshot properties'
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg in error
+
+
+def test_errors_in_create_snapshot_on_source():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'create_snapshot_on_source': False,
+ 'policy_type': 'sync_mirror',
+ 'snapmirror_label': ["daily", "weekly"],
+ 'keep': ["7", "2"],
+ }
+ msg = 'Error: option create_snapshot_on_source is not supported with policy type sync_mirror.'
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg in error
+
+ module_args = {
+ 'use_rest': 'always',
+ 'create_snapshot_on_source': False,
+ 'policy_type': 'async',
+ 'snapmirror_label': ["daily", "weekly"],
+ }
+ msg = 'Error: The properties snapmirror_label and keep must be specified with'
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg in error
+
+
+def test_async_create_snapshot_on_source():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_12_1']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['empty_records']),
+ ('POST', 'snapmirror/policies', SRR['success']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_async_with_create_snapshot_on_source']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'create_snapshot_on_source': False,
+ 'policy_type': 'vault',
+ 'snapmirror_label': ["daily", "weekly"],
+ 'keep': ["7", "2"],
+ 'prefix': ["p1", "p2"],
+ 'schedule': ["daily", "weekly"],
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_get_snapmirror_policy_sync_with_sync_type():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'snapmirror/policies', SRR['empty_records']),
+ ('POST', 'snapmirror/policies', SRR['success']),
+ ('GET', 'snapmirror/policies', SRR['get_snapmirror_policy_sync_with_sync_type']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'policy_type': 'sync_mirror',
+ 'sync_type': 'automated_failover'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_set_scope():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ # first test
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('GET', 'svm/svms', SRR['one_vserver_record']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ # vserver not found
+ assert my_obj.set_scope() == 'cluster'
+ # vserver found
+ assert my_obj.set_scope() == 'svm'
+ # API error
+ error = rest_error_message('Error getting vserver ansible info', 'svm/svms')
+ assert error in expect_and_capture_ansible_exception(my_obj.set_scope, 'fail')['msg']
+ # no vserver
+ my_obj.parameters.pop('vserver')
+ assert my_obj.set_scope() == 'cluster'
+
+
+def check_mapping(my_obj, policy_type, expected_policy_type, copy_latest_source_snapshot, copy_all_source_snapshots, create_snapshot_on_source, retention):
+ my_obj.parameters['policy_type'] = policy_type
+ if copy_latest_source_snapshot is None:
+ my_obj.parameters.pop('copy_latest_source_snapshot', None)
+ else:
+ my_obj.parameters['copy_latest_source_snapshot'] = copy_latest_source_snapshot
+ if copy_all_source_snapshots is None:
+ my_obj.parameters.pop('copy_all_source_snapshots', None)
+ else:
+ my_obj.parameters['copy_all_source_snapshots'] = copy_all_source_snapshots
+ if create_snapshot_on_source is None:
+ my_obj.parameters.pop('create_snapshot_on_source', None)
+ else:
+ my_obj.parameters['create_snapshot_on_source'] = create_snapshot_on_source
+ if retention is None:
+ my_obj.parameters.pop('snapmirror_label', None)
+ my_obj.parameters.pop('keep', None)
+ my_obj.parameters.pop('prefix', None)
+ my_obj.parameters.pop('schedule', None)
+ else:
+ for key, value in retention.items():
+ my_obj.parameters[key] = value
+ my_obj.validate_policy_type()
+ assert my_obj.parameters['policy_type'] == expected_policy_type
+
+
+def check_options(my_obj, copy_latest_source_snapshot, copy_all_source_snapshots, create_snapshot_on_source):
+ if copy_latest_source_snapshot is None:
+ assert 'copy_latest_source_snapshot' not in my_obj.parameters
+ else:
+ assert my_obj.parameters['copy_latest_source_snapshot'] == copy_latest_source_snapshot
+ if copy_all_source_snapshots is None:
+ assert 'copy_all_source_snapshots' not in my_obj.parameters
+ else:
+ assert my_obj.parameters['copy_all_source_snapshots'] == copy_all_source_snapshots
+ if create_snapshot_on_source is None:
+ assert 'create_snapshot_on_source' not in my_obj.parameters
+ else:
+ assert my_obj.parameters['create_snapshot_on_source'] == create_snapshot_on_source
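+
+# check_mapping and check_options are used as a pair in test_validate_policy_type below:
+# check_mapping seeds my_obj.parameters with a policy_type plus the related snapshot options,
+# runs validate_policy_type(), and asserts the resulting REST policy_type; check_options then
+# asserts which copy/create snapshot flags were left in the parameters.  One pairing from the
+# calls below, repeated here only to make the positional arguments explicit:
+#
+#   check_mapping(my_obj, 'async_mirror', 'async', None, None, None, None)
+#   check_options(my_obj, True, None, None)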
+
+
+def test_validate_policy_type():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ # first test
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ retention = {
+ 'snapmirror_label': ["daily", "weekly"],
+ 'keep': ["7", "2"]
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ check_mapping(my_obj, 'async', 'async', None, None, None, None)
+ check_options(my_obj, None, None, None)
+ check_mapping(my_obj, 'mirror_vault', 'async', None, None, None, None)
+ check_options(my_obj, None, None, None)
+ check_mapping(my_obj, 'vault', 'async', None, None, None, retention)
+ check_options(my_obj, None, None, False)
+ check_mapping(my_obj, 'async_mirror', 'async', None, None, None, None)
+ check_options(my_obj, True, None, None)
+ check_mapping(my_obj, 'sync', 'sync', None, None, None, None)
+ check_options(my_obj, None, None, None)
+ check_mapping(my_obj, 'sync_mirror', 'sync', None, None, None, None)
+ check_options(my_obj, None, None, None)
+ check_mapping(my_obj, 'strict_sync_mirror', 'sync', None, None, None, None)
+ check_options(my_obj, None, None, None)
+
+ my_obj.parameters['policy_type'] = 'async'
+ my_obj.parameters['sync_type'] = 'strict_sync'
+ error = "Error: 'sync_type' is only applicable for sync policy_type"
+ assert error in expect_and_capture_ansible_exception(my_obj.validate_policy_type, 'fail')['msg']
+
+ module_args = {
+ 'use_rest': 'never',
+ 'policy_type': 'sync'
+ }
+ error = 'Error: The policy types async and sync are not supported in ZAPI.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_build_body_for_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ # first test
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'snapmirror_label': ["daily", "weekly"],
+ 'keep': ["7", "2"],
+ 'copy_all_source_snapshots': True
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ body = my_obj.build_body_for_create()
+ assert 'copy_all_source_snapshots' in body
+
+
+def test_modify_snapmirror_policy_rules_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ # first test
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'snapmirror_label': ["daily", "weekly"],
+ 'keep': ["7", "2"],
+ 'copy_all_source_snapshots': True
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.modify_snapmirror_policy_rules_rest('uuid', [], ['umod'], [], []) is None
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot.py
new file mode 100644
index 000000000..f7c49eaad
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot.py
@@ -0,0 +1,363 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_snapshot '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_and_apply, create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot \
+ import NetAppOntapSnapshot as my_module, main as my_main
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+SRR = rest_responses({
+ 'volume_uuid': (200,
+ {'records': [{"uuid": "test_uuid"}], 'num_records': 1}, None,
+ ),
+ 'snapshot_record': (200,
+ {'records': [{"volume": {"uuid": "d9cd4ec5-c96d-11eb-9271-005056b3ef5a",
+ "name": "ansible_vol"},
+ "uuid": "343b5227-8c6b-4e79-a133-304bbf7537ce",
+ "svm": {"uuid": "b663d6f0-c96d-11eb-9271-005056b3ef5a",
+ "name": "ansible"},
+ "name": "ss1",
+ "create_time": "2021-06-10T17:24:41-04:00",
+ "comment": "123",
+ "expiry_time": "2022-02-04T14:00:00-05:00",
+ "snapmirror_label": "321", }], 'num_records': 1}, None),
+ 'create_response': (200, {'job': {'uuid': 'd0b3eefe-cd59-11eb-a170-005056b338cd',
+ '_links': {
+ 'self': {'href': '/api/cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd'}}}},
+ None),
+ 'job_response': (200, {'uuid': 'e43a40db-cd61-11eb-a170-005056b338cd',
+ 'description': 'PATCH /api/storage/volumes/d9cd4ec5-c96d-11eb-9271-005056b3ef5a/'
+ 'snapshots/da995362-cd61-11eb-a170-005056b338cd',
+ 'state': 'success',
+ 'message': 'success',
+ 'code': 0,
+ 'start_time': '2021-06-14T18:43:08-04:00',
+ 'end_time': '2021-06-14T18:43:08-04:00',
+ 'svm': {'name': 'ansible', 'uuid': 'b663d6f0-c96d-11eb-9271-005056b3ef5a',
+ '_links': {'self': {'href': '/api/svm/svms/b663d6f0-c96d-11eb-9271-005056b3ef5a'}}},
+ '_links': {'self': {'href': '/api/cluster/jobs/e43a40db-cd61-11eb-a170-005056b338cd'}}},
+ None)
+}, allow_override=False)
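+
+# 'create_response' above mimics a POST that only returns a job reference, and 'job_response'
+# is the completed job record; the REST create test below registers them back to back so the
+# module can follow the job link (see the cluster/jobs GET in test_rest_successfully_create).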
+
+
+snapshot_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'snapshot-info': {
+ 'comment': 'new comment',
+ 'name': 'ansible',
+ 'snapmirror-label': 'label12'
+ }
+ }
+}
+
+ZRR = zapi_responses({
+ 'get_snapshot': build_zapi_response(snapshot_info)
+})
+
+
+DEFAULT_ARGS = {
+ 'state': 'present',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'comment': 'test comment',
+ 'snapshot': 'test_snapshot',
+ 'snapmirror_label': 'test_label',
+ 'volume': 'test_vol'
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ error = create_module(my_module, fail=True)['msg']
+ assert 'missing required arguments:' in error
+ for arg in ('hostname', 'snapshot', 'volume', 'vserver'):
+ assert arg in error
+
+
+def test_ensure_get_called():
+ ''' test get_snapshot() for non-existent snapshot'''
+ register_responses([
+ ('snapshot-get-iter', ZRR['empty']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_snapshot() is None
+
+
+def test_ensure_get_called_existing():
+ ''' test get_snapshot() for existing snapshot'''
+ register_responses([
+ ('snapshot-get-iter', ZRR['get_snapshot']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_snapshot()
+
+
+def test_successful_create():
+ ''' creating snapshot and testing idempotency '''
+ register_responses([
+ ('snapshot-get-iter', ZRR['empty']),
+ ('snapshot-create', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'async_bool': True
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify():
+ ''' modifying snapshot and testing idempotency '''
+ register_responses([
+ ('snapshot-get-iter', ZRR['get_snapshot']),
+ ('snapshot-modify-iter', ZRR['success']),
+ ('snapshot-get-iter', ZRR['get_snapshot']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'comment': 'adding comment',
+ 'snapmirror_label': 'label22',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ module_args = {
+ 'use_rest': 'never',
+ 'comment': 'new comment',
+ 'snapmirror_label': 'label12',
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_rename():
+ ''' modifying snapshot and testing idempotency '''
+ register_responses([
+ ('snapshot-get-iter', ZRR['empty']),
+ ('snapshot-get-iter', ZRR['get_snapshot']),
+ ('snapshot-rename', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'from_name': 'from_snapshot',
+ 'comment': 'new comment',
+ 'snapmirror_label': 'label12',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_delete():
+ ''' deleting snapshot and testing idempotency '''
+ register_responses([
+ ('snapshot-get-iter', ZRR['get_snapshot']),
+ ('snapshot-delete', ZRR['success']),
+ ('snapshot-get-iter', ZRR['empty']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'state': 'absent',
+ 'ignore_owners': True,
+ 'snapshot_instance_uuid': 'uuid',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('snapshot-get-iter', ZRR['error']),
+ ('snapshot-create', ZRR['error']),
+ ('snapshot-delete', ZRR['error']),
+ ('snapshot-modify-iter', ZRR['error']),
+ ('snapshot-rename', ZRR['error']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']), # get version
+ ('GET', 'storage/volumes/None/snapshots', SRR['generic_error']),
+ ('POST', 'storage/volumes/None/snapshots', SRR['generic_error']),
+ ('DELETE', 'storage/volumes/None/snapshots/None', SRR['generic_error']),
+ ('PATCH', 'storage/volumes/None/snapshots/None', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error'])
+ ])
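+    # the ZAPI error responses are consumed by the first module instance
+    # (use_rest: never); the REST responses by a second instance using use_rest: always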
+ module_args = {
+ 'use_rest': 'never',
+ 'from_name': 'from_snapshot'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'Error fetching snapshot' in expect_and_capture_ansible_exception(my_obj.get_snapshot, 'fail')['msg']
+ assert 'Error creating snapshot test_snapshot:' in expect_and_capture_ansible_exception(my_obj.create_snapshot, 'fail')['msg']
+ assert 'Error deleting snapshot test_snapshot:' in expect_and_capture_ansible_exception(my_obj.delete_snapshot, 'fail')['msg']
+ assert 'Error modifying snapshot test_snapshot:' in expect_and_capture_ansible_exception(my_obj.modify_snapshot, 'fail')['msg']
+ assert 'Error renaming snapshot from_snapshot to test_snapshot:' in expect_and_capture_ansible_exception(my_obj.rename_snapshot, 'fail')['msg']
+ module_args = {'use_rest': 'always'}
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'Error fetching snapshot' in expect_and_capture_ansible_exception(my_obj.get_snapshot, 'fail')['msg']
+ assert 'Error when creating snapshot:' in expect_and_capture_ansible_exception(my_obj.create_snapshot, 'fail')['msg']
+ assert 'Error when deleting snapshot:' in expect_and_capture_ansible_exception(my_obj.delete_snapshot, 'fail')['msg']
+ assert 'Error when modifying snapshot:' in expect_and_capture_ansible_exception(my_obj.modify_snapshot, 'fail')['msg']
+ assert 'Error getting volume info:' in expect_and_capture_ansible_exception(my_obj.get_volume_uuid, 'fail')['msg']
+
+
+def test_module_fail_rest_ONTAP96():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']) # get version
+ ])
+ module_args = {'use_rest': 'always'}
+ msg = 'Error: Minimum version of ONTAP for snapmirror_label is (9, 7)'
+ assert msg in create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
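+# The REST tests below register the expected call sequence: resolve the volume
+# UUID, check whether the snapshot exists, POST/PATCH/DELETE it, and poll the
+# returned job when the response contains one.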
+def test_rest_successfully_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['empty_records']),
+ ('POST', 'storage/volumes/test_uuid/snapshots', SRR['create_response']),
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_response']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'expiry_time': 'expiry'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_create_no_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ])
+ module_args = {'use_rest': 'always'}
+ msg = 'Error: volume test_vol not found for vserver vserver.'
+ assert msg == create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_successfully_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['snapshot_record']),
+ ('PATCH', 'storage/volumes/test_uuid/snapshots/343b5227-8c6b-4e79-a133-304bbf7537ce', SRR['create_response']), # modify
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_response']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'comment': 'new comment',
+ 'expiry_time': 'expiry'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_rename():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['empty_records']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['snapshot_record']),
+ ('PATCH', 'storage/volumes/test_uuid/snapshots/343b5227-8c6b-4e79-a133-304bbf7537ce', SRR['create_response']), # modify
+ ('GET', 'cluster/jobs/d0b3eefe-cd59-11eb-a170-005056b338cd', SRR['job_response']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'from_name': 'old_snapshot'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_rename_from_not_found():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['empty_records']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['empty_records']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'from_name': 'old_snapshot'}
+ msg = 'Error renaming snapshot: test_snapshot - no snapshot with from_name: old_snapshot.'
+ assert msg == create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_successfully_delete():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['snapshot_record']),
+ ('DELETE', 'storage/volumes/test_uuid/snapshots/343b5227-8c6b-4e79-a133-304bbf7537ce', SRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_delete():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['snapshot_record']),
+ ('DELETE', 'storage/volumes/test_uuid/snapshots/343b5227-8c6b-4e79-a133-304bbf7537ce', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'state': 'absent'}
+ msg = 'Error when deleting snapshot: calling: storage/volumes/test_uuid/snapshots/343b5227-8c6b-4e79-a133-304bbf7537ce: got Expected error.'
+ assert msg == create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_call_main():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/volumes', SRR['volume_uuid']),
+ ('GET', 'storage/volumes/test_uuid/snapshots', SRR['snapshot_record']),
+ ('DELETE', 'storage/volumes/test_uuid/snapshots/343b5227-8c6b-4e79-a133-304bbf7537ce', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'state': 'absent'}
+ msg = 'Error when deleting snapshot: calling: storage/volumes/test_uuid/snapshots/343b5227-8c6b-4e79-a133-304bbf7537ce: got Expected error.'
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_unsupported_options():
+ module_args = {
+ 'use_rest': 'always',
+ 'ignore_owners': True}
+ error = "REST API currently does not support 'ignore_owners'"
+ assert error == create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args = {
+ 'use_rest': 'never',
+ 'expiry_time': 'any'}
+ error = "expiry_time is currently only supported with REST on Ontap 9.6 or higher"
+ assert error == create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_missing_netapp_lib(mock_has_netapp_lib):
+ module_args = {
+ 'use_rest': 'never',
+ }
+ mock_has_netapp_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg == create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot_policy.py
new file mode 100644
index 000000000..84d928f19
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot_policy.py
@@ -0,0 +1,658 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_snapshot_policy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy \
+ import NetAppOntapSnapshotPolicy as my_module
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
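+# Older-style unit test: a hand-rolled mock connection that returns different
+# canned XML documents depending on the 'kind' it was built with.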
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'policy':
+ xml = self.build_snapshot_policy_info()
+ elif self.type == 'snapshot_policy_info_policy_disabled':
+ xml = self.build_snapshot_policy_info_policy_disabled()
+ elif self.type == 'snapshot_policy_info_comment_modified':
+ xml = self.build_snapshot_policy_info_comment_modified()
+ elif self.type == 'snapshot_policy_info_schedules_added':
+ xml = self.build_snapshot_policy_info_schedules_added()
+ elif self.type == 'snapshot_policy_info_schedules_deleted':
+ xml = self.build_snapshot_policy_info_schedules_deleted()
+ elif self.type == 'snapshot_policy_info_modified_schedule_counts':
+ xml = self.build_snapshot_policy_info_modified_schedule_counts()
+ elif self.type == 'policy_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ def asup_log_for_cserver(self):
+ ''' mock autosupport log'''
+ return None
+
+ @staticmethod
+ def build_snapshot_policy_info():
+ ''' build xml data for snapshot-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 1,
+ 'attributes-list': {
+ 'snapshot-policy-info': {
+ 'comment': 'new comment',
+ 'enabled': 'true',
+ 'policy': 'ansible',
+ 'snapshot-policy-schedules': {
+ 'snapshot-schedule-info': {
+ 'count': 100,
+ 'schedule': 'hourly',
+ 'prefix': 'hourly',
+ 'snapmirror-label': ''
+ }
+ },
+ 'vserver-name': 'hostname'
+ }
+ }}
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_snapshot_policy_info_comment_modified():
+ ''' build xml data for snapshot-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 1,
+ 'attributes-list': {
+ 'snapshot-policy-info': {
+ 'comment': 'modified comment',
+ 'enabled': 'true',
+ 'policy': 'ansible',
+ 'snapshot-policy-schedules': {
+ 'snapshot-schedule-info': {
+ 'count': 100,
+ 'schedule': 'hourly',
+ 'prefix': 'hourly',
+ 'snapmirror-label': ''
+ }
+ },
+ 'vserver-name': 'hostname'
+ }
+ }}
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_snapshot_policy_info_policy_disabled():
+ ''' build xml data for snapshot-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 1,
+ 'attributes-list': {
+ 'snapshot-policy-info': {
+ 'comment': 'new comment',
+ 'enabled': 'false',
+ 'policy': 'ansible',
+ 'snapshot-policy-schedules': {
+ 'snapshot-schedule-info': {
+ 'count': 100,
+ 'schedule': 'hourly',
+ 'prefix': 'hourly',
+ 'snapmirror-label': ''
+ }
+ },
+ 'vserver-name': 'hostname'
+ }
+ }}
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_snapshot_policy_info_schedules_added():
+ ''' build xml data for snapshot-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 1,
+ 'attributes-list': {
+ 'snapshot-policy-info': {
+ 'comment': 'new comment',
+ 'enabled': 'true',
+ 'policy': 'ansible',
+ 'snapshot-policy-schedules': [
+ {
+ 'snapshot-schedule-info': {
+ 'count': 100,
+ 'schedule': 'hourly',
+ 'prefix': 'hourly',
+ 'snapmirror-label': ''
+ }
+ },
+ {
+ 'snapshot-schedule-info': {
+ 'count': 5,
+ 'schedule': 'daily',
+ 'prefix': 'daily',
+ 'snapmirror-label': 'daily'
+ }
+ },
+ {
+ 'snapshot-schedule-info': {
+ 'count': 10,
+ 'schedule': 'weekly',
+ 'prefix': 'weekly',
+ 'snapmirror-label': ''
+ }
+ }
+ ],
+ 'vserver-name': 'hostname'
+ }
+ }}
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_snapshot_policy_info_schedules_deleted():
+ ''' build xml data for snapshot-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 1,
+ 'attributes-list': {
+ 'snapshot-policy-info': {
+ 'comment': 'new comment',
+ 'enabled': 'true',
+ 'policy': 'ansible',
+ 'snapshot-policy-schedules': [
+ {
+ 'snapshot-schedule-info': {
+ 'schedule': 'daily',
+ 'prefix': 'daily',
+ 'count': 5,
+ 'snapmirror-label': 'daily'
+ }
+ }
+ ],
+ 'vserver-name': 'hostname'
+ }
+ }}
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_snapshot_policy_info_modified_schedule_counts():
+ ''' build xml data for snapshot-policy-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {'num-records': 1,
+ 'attributes-list': {
+ 'snapshot-policy-info': {
+ 'comment': 'new comment',
+ 'enabled': 'true',
+ 'policy': 'ansible',
+ 'snapshot-policy-schedules': [
+ {
+ 'snapshot-schedule-info': {
+ 'count': 10,
+ 'schedule': 'hourly',
+ 'prefix': 'hourly',
+ 'snapmirror-label': ''
+ }
+ },
+ {
+ 'snapshot-schedule-info': {
+ 'count': 50,
+ 'schedule': 'daily',
+ 'prefix': 'daily',
+ 'snapmirror-label': 'daily'
+ }
+ },
+ {
+ 'snapshot-schedule-info': {
+ 'count': 100,
+ 'schedule': 'weekly',
+ 'prefix': 'weekly',
+ 'snapmirror-label': ''
+ }
+ }
+ ],
+ 'vserver-name': 'hostname'
+ }
+ }}
+ xml.translate_struct(data)
+ return xml
+
+
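+# unittest-style harness: set_module_args() feeds the module parameters, and the
+# patched exit_json/fail_json raise AnsibleExitJson/AnsibleFailJson so each apply()
+# can be checked with pytest.raises.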
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
+
+ def set_default_args(self):
+ if self.onbox:
+ hostname = '10.10.10.10'
+ username = 'admin'
+ password = '1234'
+ name = 'ansible'
+ enabled = True
+ count = 100
+ schedule = 'hourly'
+ prefix = 'hourly'
+ comment = 'new comment'
+ else:
+ hostname = 'hostname'
+ username = 'username'
+ password = 'password'
+ name = 'ansible'
+ enabled = True
+ count = 100
+ schedule = 'hourly'
+ prefix = 'hourly'
+ comment = 'new comment'
+ return dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'name': name,
+ 'enabled': enabled,
+ 'count': count,
+ 'schedule': schedule,
+ 'prefix': prefix,
+ 'comment': comment,
+ 'use_rest': 'never'
+ })
+
+ def set_default_current(self):
+ default_args = self.set_default_args()
+ return dict({
+ 'name': default_args['name'],
+ 'enabled': default_args['enabled'],
+ 'count': [default_args['count']],
+ 'schedule': [default_args['schedule']],
+ 'snapmirror_label': [''],
+ 'prefix': [default_args['prefix']],
+ 'comment': default_args['comment'],
+ 'vserver': default_args['hostname']
+ })
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called(self):
+ ''' test get_snapshot_policy() for non-existent snapshot policy'''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = self.server
+ assert my_obj.get_snapshot_policy() is None
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_snapshot_policy() for existing snapshot policy'''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.server = MockONTAPConnection(kind='policy')
+ assert my_obj.get_snapshot_policy()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy.NetAppOntapSnapshotPolicy.create_snapshot_policy')
+ def test_successful_create(self, create_snapshot):
+ ''' creating snapshot policy and testing idempotency '''
+ set_module_args(self.set_default_args())
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ create_snapshot.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy.NetAppOntapSnapshotPolicy.modify_snapshot_policy')
+ def test_successful_modify_comment(self, modify_snapshot):
+ ''' modifying snapshot policy comment and testing idempotency '''
+ data = self.set_default_args()
+ data['comment'] = 'modified comment'
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ current = self.set_default_current()
+ modify_snapshot.assert_called_with(current)
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('snapshot_policy_info_comment_modified')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy.NetAppOntapSnapshotPolicy.modify_snapshot_policy')
+ def test_successful_disable_policy(self, modify_snapshot):
+ ''' disabling snapshot policy and testing idempotency '''
+ data = self.set_default_args()
+ data['enabled'] = False
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ current = self.set_default_current()
+ modify_snapshot.assert_called_with(current)
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('snapshot_policy_info_policy_disabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy.NetAppOntapSnapshotPolicy.modify_snapshot_policy')
+ def test_successful_enable_policy(self, modify_snapshot):
+ ''' enabling snapshot policy and testing idempotency '''
+ data = self.set_default_args()
+ data['enabled'] = True
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('snapshot_policy_info_policy_disabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ current = self.set_default_current()
+ current['enabled'] = False
+ modify_snapshot.assert_called_with(current)
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy.NetAppOntapSnapshotPolicy.modify_snapshot_policy')
+ def test_successful_modify_schedules_add(self, modify_snapshot):
+ ''' adding snapshot policy schedules and testing idempotency '''
+ data = self.set_default_args()
+ data['schedule'] = ['hourly', 'daily', 'weekly']
+ data['prefix'] = ['hourly', 'daily', 'weekly']
+ data['count'] = [100, 5, 10]
+ data['snapmirror_label'] = ['', 'daily', '']
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ current = self.set_default_current()
+ modify_snapshot.assert_called_with(current)
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('snapshot_policy_info_schedules_added')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy.NetAppOntapSnapshotPolicy.modify_snapshot_policy')
+ def test_successful_modify_schedules_delete(self, modify_snapshot):
+ ''' deleting snapshot policy schedules and testing idempotency '''
+ data = self.set_default_args()
+ data['schedule'] = ['daily']
+ data['prefix'] = ['daily']
+ data['count'] = [5]
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ current = self.set_default_current()
+ modify_snapshot.assert_called_with(current)
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('snapshot_policy_info_schedules_deleted')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy.NetAppOntapSnapshotPolicy.modify_snapshot_policy')
+ def test_successful_modify_schedules(self, modify_snapshot):
+ ''' modifying snapshot policy schedule counts and testing idempotency '''
+ data = self.set_default_args()
+ data['schedule'] = ['hourly', 'daily', 'weekly']
+ data['count'] = [10, 50, 100]
+ data['prefix'] = ['hourly', 'daily', 'weekly']
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ current = self.set_default_current()
+ modify_snapshot.assert_called_with(current)
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('snapshot_policy_info_modified_schedule_counts')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy.NetAppOntapSnapshotPolicy.delete_snapshot_policy')
+ def test_successful_delete(self, delete_snapshot):
+ ''' deleting snapshot policy and testing idempotency '''
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ delete_snapshot.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_valid_schedule_count(self):
+ ''' validate when schedule has same number of elements '''
+ data = self.set_default_args()
+ data['schedule'] = ['hourly', 'daily', 'weekly', 'monthly', '5min']
+ data['prefix'] = ['hourly', 'daily', 'weekly', 'monthly', '5min']
+ data['count'] = [1, 2, 3, 4, 5]
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ my_obj.create_snapshot_policy()
+ create_xml = my_obj.server.xml_in
+ assert data['count'][2] == int(create_xml['count3'])
+ assert data['schedule'][4] == create_xml['schedule5']
+
+ def test_valid_schedule_count_with_snapmirror_labels(self):
+ ''' validate when schedule has same number of elements with snapmirror labels '''
+ data = self.set_default_args()
+ data['schedule'] = ['hourly', 'daily', 'weekly', 'monthly', '5min']
+ data['prefix'] = ['hourly', 'daily', 'weekly', 'monthly', '5min']
+ data['count'] = [1, 2, 3, 4, 5]
+ data['snapmirror_label'] = ['hourly', 'daily', 'weekly', 'monthly', '5min']
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ my_obj.create_snapshot_policy()
+ create_xml = my_obj.server.xml_in
+ assert data['count'][2] == int(create_xml['count3'])
+ assert data['schedule'][4] == create_xml['schedule5']
+ assert data['snapmirror_label'][3] == create_xml['snapmirror-label4']
+
+ def test_invalid_params(self):
+ ''' validate error when schedule does not have same number of elements '''
+ data = self.set_default_args()
+ data['schedule'] = ['s1', 's2']
+ data['prefix'] = ['s1', 's2']
+ data['count'] = [1, 2, 3]
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_snapshot_policy()
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_invalid_schedule_count(self):
+ ''' validate error when schedule has more than 5 elements '''
+ data = self.set_default_args()
+ data['schedule'] = ['s1', 's2', 's3', 's4', 's5', 's6']
+ data['count'] = [1, 2, 3, 4, 5, 6]
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_snapshot_policy()
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_invalid_schedule_count_less_than_one(self):
+ ''' validate error when schedule has less than 1 element '''
+ data = self.set_default_args()
+ data['schedule'] = []
+ data['count'] = []
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_snapshot_policy()
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_invalid_schedule_count_is_none(self):
+ ''' validate error when schedule is None '''
+ data = self.set_default_args()
+ data['schedule'] = None
+ data['count'] = None
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_snapshot_policy()
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_invalid_schedule_count_with_snapmirror_labels(self):
+ ''' validate error when schedule with snapmirror labels does not have same number of elements '''
+ data = self.set_default_args()
+ data['schedule'] = ['s1', 's2', 's3']
+ data['count'] = [1, 2, 3]
+ data['snapmirror_label'] = ['sm1', 'sm2']
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_snapshot_policy()
+ msg = 'Error: Each Snapshot Policy schedule must have an accompanying SnapMirror Label'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_invalid_schedule_count_with_prefixes(self):
+ ''' validate error when schedule with prefixes does not have same number of elements '''
+ data = self.set_default_args()
+ data['schedule'] = ['s1', 's2', 's3']
+ data['count'] = [1, 2, 3]
+ data['prefix'] = ['s1', 's2']
+ set_module_args(data)
+ my_obj = my_module()
+ my_obj.asup_log_for_cserver = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = self.server
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_snapshot_policy()
+ msg = 'Error: Each Snapshot Policy schedule must have an accompanying prefix'
+ assert exc.value.args[0]['msg'] == msg
+
+ def test_if_all_methods_catch_exception(self):
+ module_args = {}
+ module_args.update(self.set_default_args())
+ set_module_args(module_args)
+ my_obj = my_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('policy_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.create_snapshot_policy()
+ assert 'Error creating snapshot policy ansible:' in exc.value.args[0]['msg']
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.delete_snapshot_policy()
+ assert 'Error deleting snapshot policy ansible:' in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot_policy_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot_policy_rest.py
new file mode 100644
index 000000000..b79507759
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snapshot_policy_rest.py
@@ -0,0 +1,481 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_snapshot_policy """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_snapshot_policy \
+ import NetAppOntapSnapshotPolicy as my_module
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
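+# rest_responses() provides the shared fixtures ('is_rest_9_9_0', 'is_rest_96',
+# 'generic_error', 'empty_records', 'empty_good', ...); only module specific
+# records are defined here.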
+SRR = rest_responses({
+ 'snapshot_record': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "comment": "modified comment",
+ "enabled": True,
+ "name": "policy_name",
+ "copies": [
+ {
+ "count": 10,
+ "schedule": {
+ "name": "hourly"
+ },
+ "prefix": 'hourly',
+ "snapmirror_label": ''
+ },
+ {
+ "count": 30,
+ "schedule": {
+ "name": "weekly"
+ },
+ "prefix": 'weekly',
+ "snapmirror_label": ''
+ }
+ ],
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }
+ ],
+ "num_records": 1
+ }, None),
+ 'schedule_record': (200, {"records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "comment": "modified comment",
+ "enabled": 'true',
+ "name": "policy_name",
+ "count": 10,
+ "prefix": "hourly",
+ "snapmirror_label": '',
+ "schedule": {
+ "name": "hourly",
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa"
+ },
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ },
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "ansibleSVM"
+ },
+ "comment": "modified comment",
+ "enabled": 'true',
+ "name": "policy_name",
+ "count": 30,
+ "prefix": "weekly",
+ "snapmirror_label": '',
+ "schedule": {
+ "name": "weekly",
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30dsa"
+ },
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }
+ ], "num_records": 2}, None),
+})
+
+
+ARGS_REST = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'policy_name',
+ 'vserver': 'ansibleSVM',
+ 'enabled': True,
+ 'count': [10, 30],
+ 'schedule': "hourly,weekly",
+ 'comment': 'modified comment',
+ 'use_rest': 'always'
+}
+
+ARGS_REST_no_SVM = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'policy_name',
+ 'enabled': True,
+ 'count': [10, 30],
+ 'schedule': "hourly,weekly",
+ 'comment': 'modified comment',
+ 'use_rest': 'always'
+}
+
+
+def test_error_get_snapshot_policy_rest():
+    ''' Test error fetching snapshot policy with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on fetching snapshot policy:' in error
+
+
+def test_error_get_snapshot_schedule_rest():
+    ''' Test error fetching snapshot schedules with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('PATCH', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_good']),
+ ('GET', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['generic_error'])
+ ])
+ module_args = {
+ 'enabled': False,
+ 'comment': 'testing policy',
+ 'name': 'policy2'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on fetching snapshot schedule:' in error
+
+
+def test_module_error_ontap_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ module_args = {'use_rest': 'always'}
+ msg = create_module(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error: REST requires ONTAP 9.8 or later for snapshot schedules.' == msg
+
+
+def test_create_snapshot_policy_rest():
+ ''' Test create with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['empty_records']),
+ ('POST', 'storage/snapshot-policies', SRR['empty_good']),
+ ])
+ assert create_and_apply(my_module, ARGS_REST)
+
+
+def test_create_snapshot_policy_with_snapmirror_label_rest():
+ ''' Test create with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['empty_records']),
+ ('POST', 'storage/snapshot-policies', SRR['empty_good']),
+ ])
+ module_args = {
+ "snapmirror_label": ['hourly', 'weekly']
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_create_snapshot_policy_with_prefix_rest():
+ ''' Test create with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['empty_records']),
+ ('POST', 'storage/snapshot-policies', SRR['empty_good']),
+ ])
+ module_args = {
+ "prefix": ['', '']
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_error_create_snapshot_policy_rest():
+ ''' Test error create with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['empty_records']),
+ ('POST', 'storage/snapshot-policies', SRR['generic_error']),
+ ])
+ error = create_and_apply(my_module, ARGS_REST, fail=True)['msg']
+ assert 'Error on creating snapshot policy:' in error
+
+
+def test_delete_snapshot_policy_rest():
+ ''' Test delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('DELETE', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_error_delete_snapshot_policy_rest():
+ ''' Test error delete with rest API'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('DELETE', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['generic_error']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on deleting snapshot policy:' in error
+
+
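+# The modify tests below expect a two step flow: PATCH the policy itself for
+# comment/enabled/name changes, then GET the per-policy schedules and
+# POST/PATCH/DELETE the entries that differ.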
+def test_modify_snapshot_policy_rest():
+ ''' Test modify comment, rename and disable policy with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('PATCH', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_good']),
+ ('GET', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['schedule_record'])
+ ])
+ module_args = {
+ 'enabled': False,
+ 'comment': 'testing policy',
+ 'name': 'policy2'
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_error_modify_snapshot_policy_rest():
+    ''' Negative test - modify snapshot policy with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('PATCH', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['generic_error']),
+ ])
+ module_args = {
+ 'enabled': 'no'
+ }
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert 'Error on modifying snapshot policy:' in error
+
+
+def test_modify_snapshot_schedule_rest():
+ ''' Test modify snapshot schedule and disable policy with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('PATCH', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412', SRR['empty_good']),
+ ('GET', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['schedule_record']),
+ ('PATCH', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules/671aa46e-11ad-11ec-a267-005056b30dsa', SRR['empty_good'])
+ ])
+ module_args = {
+ "enabled": False,
+ "count": ['10', '20'],
+ "schedule": ['hourly', 'weekly']
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_modify_snapshot_schedule_count_label_rest():
+ ''' Test modify snapmirror_label and count with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('GET', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['schedule_record']),
+ ('PATCH', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules/671aa46e-11ad-11ec-a267-005056b30dsa', SRR['empty_good'])
+ ])
+ module_args = {
+ "snapmirror_label": ['', 'weekly'],
+ "count": [10, 20],
+ "schedule": ['hourly', 'weekly']
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_modify_snapshot_schedule_count_rest():
+ ''' Test modify snapshot count with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('GET', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['schedule_record']),
+ ('PATCH', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules/671aa46e-11ad-11ec-a267-005056b30dsa', SRR['empty_good'])
+ ])
+ module_args = {
+ "count": "10,40",
+ "schedule": ['hourly', 'weekly'],
+ "snapmirror_label": ['', '']
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_modify_snapshot_count_rest():
+ ''' Test modify snapshot count, snapmirror_label and prefix with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('GET', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['schedule_record']),
+ ('PATCH', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good'])
+ ])
+ module_args = {
+ "count": "20,30",
+ "schedule": ['hourly', 'weekly'],
+ "snapmirror_label": ['hourly', ''],
+ "prefix": ['', 'weekly']
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_delete_snapshot_schedule_rest():
+ ''' Test delete snapshot schedule with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('GET', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['schedule_record']),
+ ('DELETE', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good'])
+ ])
+ module_args = {
+ "count": 30,
+ "schedule": ['weekly']
+ }
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_delete_all_snapshot_schedule_rest():
+    ''' Validate error when deleting all snapshot schedules with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record'])
+ ])
+ module_args = {
+ "count": [],
+ "schedule": []
+ }
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert msg in error
+
+
+def test_add_snapshot_schedule_rest():
+    ''' Test modify by adding schedules to a snapshot policy with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record']),
+ ('GET', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['schedule_record']),
+ ('POST', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['empty_good']),
+ ('POST', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['success']),
+ ('POST', 'storage/snapshot-policies/1cd8a442-86d1-11e0-ae1c-123478563412/schedules', SRR['success'])
+ ])
+ module_args = {
+ "count": "10,30,20,1,2",
+ "schedule": ['hourly', 'weekly', 'daily', 'monthly', '5min'],
+ "snapmirror_label": ['', '', '', '', '']}
+ assert create_and_apply(my_module, ARGS_REST, module_args)
+
+
+def test_add_max_snapshot_schedule_rest():
+    ''' Test modify by adding more than the maximum number of schedules to a snapshot policy with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'storage/snapshot-policies', SRR['snapshot_record'])
+ ])
+ module_args = {
+ "count": "10,30,20,1,2,3",
+ "schedule": ['hourly', 'weekly', 'daily', 'monthly', '5min', '10min'],
+ "snapmirror_label": ['', '', '', '', '', '']}
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ error = create_and_apply(my_module, ARGS_REST, module_args, fail=True)['msg']
+ assert msg in error
+
+
+def test_invalid_count_rest():
+ ''' Test invalid count for a schedule with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ current = {
+ 'schedule': 'weekly',
+ 'count': []}
+ my_module_object = create_module(my_module, ARGS_REST, current)
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.validate_parameters, 'fail')['msg']
+
+
+def test_validate_schedule_count_with_snapmirror_labels_rest():
+    ''' validate error when snapmirror labels do not match the number of schedules with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ current = {
+ 'schedule': ['hourly', 'daily', 'weekly', 'monthly', '5min'],
+ 'snapmirror_label': ['', '', ''],
+ 'count': [1, 2, 3, 4, 5]}
+ my_module_object = create_module(my_module, ARGS_REST, current)
+ msg = "Error: Each Snapshot Policy schedule must have an accompanying SnapMirror Label"
+ assert msg in expect_and_capture_ansible_exception(my_module_object.validate_parameters, 'fail')['msg']
+
+
+def test_validate_schedule_count_with_prefix_rest():
+    ''' validate error when prefixes do not match the number of schedules with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ current = {
+ 'schedule': ['hourly', 'daily', 'weekly', 'monthly', '5min'],
+ 'prefix': ['hourly', 'daily', 'weekly'],
+ 'count': [1, 2, 3, 4, 5]}
+ my_module_object = create_module(my_module, ARGS_REST, current)
+ msg = "Error: Each Snapshot Policy schedule must have an accompanying prefix"
+ assert msg in expect_and_capture_ansible_exception(my_module_object.validate_parameters, 'fail')['msg']
+
+
+def test_validate_schedule_count_max_rest():
+    ''' Validate error when exceeding the maximum number of snapshot schedules with REST API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ current = {
+ 'schedule': ['hourly', 'daily', 'weekly', 'monthly', '5min', '10min'],
+ 'count': [1, 2, 3, 4, 5, 6]}
+ my_module_object = create_module(my_module, ARGS_REST, current)
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.validate_parameters, 'fail')['msg']
+
+
+def test_invalid_count_number_rest():
+    ''' validate error when count does not match the number of schedules with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ current = {
+ 'schedule': ['hourly', 'daily', 'weekly'],
+ 'count': [1, 2, 3, 4, 5, 6]
+ }
+ my_module_object = create_module(my_module, ARGS_REST, current)
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.validate_parameters, 'fail')['msg']
+
+
+def test_invalid_schedule_count_rest():
+    ''' validate error when schedule and count lists are empty with rest API '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ current = {
+ 'schedule': [],
+ 'count': []}
+ my_module_object = create_module(my_module, ARGS_REST, current)
+ msg = 'Error: A Snapshot policy must have at least 1 ' \
+ 'schedule and can have up to a maximum of 5 schedules, with a count ' \
+ 'representing the maximum number of Snapshot copies for each schedule'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.validate_parameters, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snmp.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snmp.py
new file mode 100644
index 000000000..24d8c5da4
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snmp.py
@@ -0,0 +1,158 @@
+# (c) 2018-2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP snmp Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import assert_no_warnings, set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_snmp \
+ import NetAppONTAPSnmp as my_module, main as uut_main # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def default_args():
+ args = {
+ 'state': 'present',
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'https': 'true',
+ 'validate_certs': 'false',
+ 'password': 'password',
+ 'use_rest': 'always'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_6': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'community_user_record': (200, {
+ 'records': [{
+ "name": "snmpv3user2",
+ "authentication_method": "community",
+ 'engine_id': "80000315058e02057c0fb8e911bc9f005056bb942e"
+ }],
+ 'num_records': 1
+ }, None),
+ 'snmp_user_record': (200, {
+ 'records': [{
+ "name": "snmpv3user3",
+ "authentication_method": "usm",
+ 'engine_id': "80000315058e02057c0fb8e911bc9f005056bb942e"
+ }],
+ 'num_records': 1
+ }, None),
+}
+
+
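+# This file mocks OntapRestAPI.send_request directly: each test sets side_effect
+# to a fixed sequence of responses, ending with 'end_of_sequence' so any
+# unexpected extra call fails the test.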
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args(dict(hostname=''))
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+ msg = 'missing required arguments: community_name'
+ assert msg == exc.value.args[0]['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_get_community_called(mock_request, patch_ansible):
+ ''' test get'''
+ args = dict(default_args())
+ args['community_name'] = 'snmpv3user2'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['community_user_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_create_community_called(mock_request, patch_ansible):
+    ''' test create community '''
+ args = dict(default_args())
+ args['community_name'] = 'snmpv3user2'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record'], # get
+ SRR['empty_good'], # create
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_community_called(mock_request, patch_ansible):
+    ''' test delete community '''
+ args = dict(default_args())
+ args['community_name'] = 'snmpv3user2'
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['community_user_record'], # get
+ SRR['community_user_record'],
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is True
+ assert_no_warnings()
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_ensure_delete_community_idempotent(mock_request, patch_ansible):
+    ''' test delete community idempotency '''
+ args = dict(default_args())
+ args['community_name'] = 'snmpv3user2'
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest_9_8'], # get version
+ SRR['zero_record'], # get
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ print('Info: %s' % exc.value.args[0])
+ assert exc.value.args[0]['changed'] is False
+ assert_no_warnings()
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snmp_traphosts.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snmp_traphosts.py
new file mode 100644
index 000000000..43b9624bb
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_snmp_traphosts.py
@@ -0,0 +1,153 @@
+# (c) 2020-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_snmp_traphosts """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_snmp_traphosts \
+ import NetAppONTAPSnmpTraphosts as traphost_module # module under test
+
+# REST API canned responses when mocking send_request
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'snmp_record': (
+ 200,
+ {
+ "records": [
+ {
+ "host": "example.com",
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
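+# Each SRR entry is a (status_code, json_body, error) tuple.  Keys such as
+# 'is_rest_9_8_0', 'empty_records', 'empty_good' and 'generic_error' used below are
+# presumably added by the shared rest_responses() factory on top of the module
+# specific entries above.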
+
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'host': 'example.com'
+}
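+# Test pattern used below (a sketch of how the shared framework is used here):
+# register_responses() queues (METHOD, endpoint, SRR entry) expectations that are
+# consumed in order, while create_and_apply() builds the module from ARGS_REST
+# (optionally merged with per-test module_args), runs apply(), and returns the
+# exit/fail JSON so the tests can assert on 'changed' or 'msg'.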
+
+
+def test_rest_error_get():
+ '''Test error rest get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'support/snmp/traphosts', SRR['generic_error']),
+ ])
+ error = create_and_apply(traphost_module, ARGS_REST, fail=True)['msg']
+ msg = "Error on fetching snmp traphosts info:"
+ assert msg in error
+
+
+def test_rest_create():
+ '''Test create snmp traphost'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'support/snmp/traphosts', SRR['empty_records']),
+ ('POST', 'support/snmp/traphosts', SRR['empty_good']),
+ ])
+ assert create_and_apply(traphost_module, ARGS_REST)
+
+
+def test_rest_error_create():
+ '''Test error create snmp traphost'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'support/snmp/traphosts', SRR['empty_records']),
+ ('POST', 'support/snmp/traphosts', SRR['generic_error']),
+ ])
+ error = create_and_apply(traphost_module, ARGS_REST, fail=True)['msg']
+ msg = "Error creating traphost:"
+ assert msg in error
+
+
+def test_rest_delete():
+ '''Test delete snmp traphost'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'support/snmp/traphosts', SRR['snmp_record']),
+ ('DELETE', 'support/snmp/traphosts/example.com', SRR['empty_good']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(traphost_module, ARGS_REST, module_args)
+
+
+def test_rest_error_delete():
+ '''Test error delete snmp traphost'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'support/snmp/traphosts', SRR['snmp_record']),
+ ('DELETE', 'support/snmp/traphosts/example.com', SRR['generic_error']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ error = create_and_apply(traphost_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error deleting traphost:"
+ assert msg in error
+
+
+def test_create_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'support/snmp/traphosts', SRR['snmp_record'])
+ ])
+ module_args = {
+ 'state': 'present'
+ }
+ assert not create_and_apply(traphost_module, ARGS_REST, module_args)['changed']
+
+
+def test_delete_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'support/snmp/traphosts', SRR['empty_records'])
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not create_and_apply(traphost_module, ARGS_REST, module_args)['changed']
+
+
+def test_ontap_version_rest():
+ ''' Test ONTAP version '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ module_args = {'use_rest': 'always'}
+ error = create_module(traphost_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error: na_ontap_snmp_traphosts only supports REST, and requires ONTAP 9.7.0 or later."
+ assert msg in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_software_update.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_software_update.py
new file mode 100644
index 000000000..40bf3e851
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_software_update.py
@@ -0,0 +1,1124 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_software_update '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_warning_was_raised, expect_and_capture_ansible_exception, call_main, create_module, create_and_apply, patch_ansible, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import JOB_GET_API, rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, build_zapi_response, zapi_error_message, zapi_responses
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_software_update \
+ import NetAppONTAPSoftwareUpdate as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def cluster_image_info(mixed=False):
+ version1 = 'Fattire__9.3.0'
+ version2 = version1
+ if mixed:
+ version2 += '.1'
+ return {
+ 'num-records': 1,
+ # composite response, attributes-list for cluster-image-get-iter and attributes for cluster-image-get
+ 'attributes-list': [
+ {'cluster-image-info': {
+ 'node-id': 'node4test',
+ 'current-version': version1}},
+ {'cluster-image-info': {
+ 'node-id': 'node4test',
+ 'current-version': version2}},
+ ],
+ 'attributes': {
+ 'cluster-image-info': {
+ 'node-id': 'node4test',
+ 'current-version': version1
+ }},
+ }
+
+
+def software_update_info(status):
+ if status == 'async_pkg_get_phase_complete':
+ overall_status = 'completed'
+ elif status == 'async_pkg_get_phase_running':
+ overall_status = 'in_progress'
+ else:
+ overall_status = status
+
+ return {
+ 'num-records': 1,
+ # 'attributes-list': {'cluster-image-info': {'node-id': node}},
+ 'progress-status': status,
+ 'progress-details': 'some_details',
+ 'failure-reason': 'failure_reason',
+ 'attributes': {
+ 'ndu-progress-info': {
+ 'overall-status': overall_status,
+ 'completed-node-count': '0',
+ 'validation-reports': [{
+ 'validation-report-info': {
+ 'one_check': 'one',
+ 'two_check': 'two'
+ }}]}},
+ }
+
+
+cluster_image_validation_report_list = {
+ 'cluster-image-validation-report-list': [
+ {'cluster-image-validation-report-list-info': {
+ 'required-action': {
+ 'required-action-info': {
+ 'action': 'some_action',
+ 'advice': 'some_advice',
+ 'error': 'some_error',
+ }
+ },
+ 'ndu-check': 'ndu_ck',
+ 'ndu-status': 'ndu_st',
+ }},
+ {'cluster-image-validation-report-list-info': {
+ 'required-action': {
+ 'required-action-info': {
+ 'action': 'other_action',
+ 'advice': 'other_advice',
+ 'error': 'other_error',
+ }
+ },
+ 'ndu-check': 'ndu_ck',
+ 'ndu-status': 'ndu_st',
+ }},
+ ],
+}
+
+
+cluster_image_package_local_info = {
+ 'attributes-list': [
+ {'cluster-image-package-local-info': {
+ 'package-version': 'Fattire__9.3.0',
+
+ }},
+ {'cluster-image-package-local-info': {
+ 'package-version': 'Fattire__9.3.1',
+
+ }},
+ ],
+}
+
+
+ZRR = zapi_responses({
+ 'cluster_image_info': build_zapi_response(cluster_image_info()),
+ 'cluster_image_info_mixed': build_zapi_response(cluster_image_info(True)),
+ 'software_update_info_running': build_zapi_response(software_update_info('async_pkg_get_phase_running')),
+ 'software_update_info_complete': build_zapi_response(software_update_info('async_pkg_get_phase_complete')),
+ 'software_update_info_error': build_zapi_response(software_update_info('error')),
+ 'cluster_image_validation_report_list': build_zapi_response(cluster_image_validation_report_list),
+ 'cluster_image_package_local_info': build_zapi_response(cluster_image_package_local_info, 2),
+ 'error_18408': build_zapi_error(18408, 'pkg exists!')
+})
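+# build_zapi_response() wraps each dict above as a canned ZAPI result and
+# build_zapi_error() builds a failing response with the given code and message.
+# Generic entries such as 'success', 'no_records' and 'error' used below are
+# presumably provided by the shared zapi_responses() factory.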
+
+
+def cluster_software_node_info(mixed=False):
+ version1 = 'Fattire__9.3.0'
+ version2 = 'GEN_MAJ_min_2' if mixed else version1
+ return {
+ 'nodes': [
+ {'name': 'node1', 'version': version1},
+ {'name': 'node2', 'version': version2},
+ ]
+ }
+
+
+def cluster_software_state_info(state):
+ # state: in_progress, completed, ...
+ return {
+ 'state': state
+ }
+
+
+cluster_software_validation_results = {
+ "validation_results": [{
+ "action": {
+ "message": "Use NFS hard mounts, if possible."
+ },
+ "issue": {
+ "message": "Cluster HA is not configured in the cluster."
+ },
+ "status": "warning",
+ "update_check": "nfs_mounts"
+ }],
+}
+
+
+def cluster_software_download_info(state):
+ return {
+ "message": "message",
+ "state": state,
+ }
+
+
+SRR = rest_responses({
+ 'cluster_software_node_info': (200, cluster_software_node_info(), None),
+ 'cluster_software_node_info_mixed': (200, cluster_software_node_info(True), None),
+ 'cluster_software_validation_results': (200, cluster_software_validation_results, None),
+ 'cluster_software_state_completed': (200, cluster_software_state_info('completed'), None),
+ 'cluster_software_state_in_progress': (200, cluster_software_state_info('in_progress'), None),
+ 'cluster_software_state_in_error': (200, cluster_software_state_info('in_error'), None),
+ 'cluster_software_download_state_success': (200, cluster_software_download_info('success'), None),
+ 'cluster_software_download_state_running': (200, cluster_software_download_info('running'), None),
+ 'cluster_software_package_info_ft': (200, {'records': [{'version': 'Fattire__9.3.0'}]}, None),
+ 'cluster_software_package_info_pte': (200, {'records': [{'version': 'PlinyTheElder'}]}, None),
+ 'error_image_already_exists': (200, {}, 'Package image with the same name already exists'),
+ 'error_download_in_progress': (200, {}, 'Software get operation already in progress'),
+})
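+# The two error entries above mimic ONTAP error texts: the tests below rely on the
+# module matching these strings so that "package already exists" and "download
+# already in progress" are verified as benign conditions rather than hard failures.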
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'package_version': 'Fattire__9.3.0',
+ 'package_url': 'abc.com',
+ 'https': 'true',
+ 'stabilize_minutes': 10
+}
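+# Per-test module_args are merged over DEFAULT_ARGS by create_and_apply()/call_main();
+# 'use_rest: never' exercises the ZAPI code path and 'use_rest: always' the REST path.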
+
+
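+# ZAPI happy path mocked below: list local packages, read current cluster image
+# versions, trigger the package download, poll download progress, trigger the update,
+# poll update progress, then delete the package.  @patch('time.sleep') keeps the
+# polling loops from actually sleeping.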
+@patch('time.sleep')
+def test_ensure_apply_for_update_called(dont_sleep):
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-update', ZRR['success']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-package-delete', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_ensure_apply_for_update_called_node(dont_sleep):
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-update', ZRR['success']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-package-delete', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "nodes": ["node_abc"],
+ "package_version": "PlinyTheElder",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_ensure_apply_for_update_called_idempotent(dont_sleep):
+ # image already installed
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+
+ ])
+ module_args = {
+ "use_rest": "never",
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_ensure_apply_for_update_called_idempotent_node(dont_sleep):
+ # image already installed
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get', ZRR['cluster_image_info']),
+
+ ])
+ module_args = {
+ "use_rest": "never",
+ "nodes": ["node_abc"],
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_ensure_apply_for_update_called_with_validation(dont_sleep):
+ # for validation before update
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-validate', ZRR['success']),
+ ('ZAPI', 'cluster-image-update', ZRR['success']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-package-delete', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ "validate_after_download": True,
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_negative_download_error(dont_sleep):
+ ''' downloading software - error while downloading the image - first request '''
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['error']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ }
+ error = zapi_error_message('Error downloading cluster image package for abc.com')
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_negative_download_progress_error(dont_sleep):
+ ''' downloading software - error while downloading the image - progress error '''
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_error']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ }
+ error = 'Error downloading package: failure_reason'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_negative_download_progress_error_no_status(dont_sleep):
+ ''' downloading software - progress status missing, retried, then progress error '''
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['success']), # retrying if status cannot be found
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_error']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ }
+ error = 'Error downloading package: failure_reason'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_negative_download_progress_error_fetching_status(dont_sleep):
+ ''' downloading software - ZAPI error while fetching download progress '''
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['error']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ }
+ error = zapi_error_message('Error fetching cluster image package download progress for abc.com')
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_negative_update_error_zapi(dont_sleep):
+ ''' updating software - error while updating the image '''
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-update', ZRR['error']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['error']), # additional error details
+ ('ZAPI', 'cluster-image-validate', ZRR['error']), # additional error details
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ }
+ error = zapi_error_message('Error updating cluster image for PlinyTheElder')
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_negative_update_error(dont_sleep):
+ ''' updating software - error while updating the image '''
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-update', ZRR['success']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['software_update_info_error']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['software_update_info_error']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ }
+ error = 'Error updating image using ZAPI: overall_status: error.'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_negative_update_error_timeout(dont_sleep):
+ ''' updating software - timeout while waiting for the update to complete '''
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_running']),
+ ('ZAPI', 'cluster-image-get-download-progress', ZRR['software_update_info_complete']),
+ ('ZAPI', 'cluster-image-update', ZRR['success']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['software_update_info_error']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['software_update_info_running']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "package_version": "PlinyTheElder",
+ }
+ error = 'Timeout error updating image using ZAPI: overall_status: in_progress. Should the timeout value be increased?'\
+ ' Current value is 1800 seconds. The software update continues in background.'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ module_args = {
+ "use_rest": "never"
+ }
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_fail_with_http():
+ args = dict(DEFAULT_ARGS)
+ args.pop('https')
+ assert 'Error: https parameter must be True' == call_main(my_main, args, fail=True)['msg']
+
+
+def test_is_update_required():
+ ''' update is required if nodes have different images, or version does not match '''
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info_mixed']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info_mixed']),
+ ])
+ module_args = {
+ "use_rest": "never"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert not my_obj.is_update_required()
+ assert my_obj.is_update_required()
+ my_obj.parameters["package_version"] = "PlinyTheElder"
+ assert my_obj.is_update_required()
+ assert my_obj.is_update_required()
+
+
+def test_cluster_image_validate():
+ ''' check error, then check that reports are read correctly '''
+ register_responses([
+ ('ZAPI', 'cluster-image-validate', ZRR['error']),
+ ('ZAPI', 'cluster-image-validate', ZRR['cluster_image_validation_report_list']),
+ ])
+ module_args = {
+ "use_rest": "never"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.cluster_image_validate() == zapi_error_message('Error running cluster image validate')
+ reports = my_obj.cluster_image_validate()
+ assert 'required_action' in reports[0]
+ assert 'action' in reports[0]['required_action']
+ assert reports[0]['required_action']['action'] == 'some_action'
+ assert reports[1]['required_action']['action'] == 'other_action'
+
+
+def test_cluster_image_zapi_errors():
+ ''' ZAPI errors on get, delete and local package list '''
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', ZRR['error']),
+ ('ZAPI', 'cluster-image-get', ZRR['error']),
+ ('ZAPI', 'cluster-image-package-delete', ZRR['error']),
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['error']),
+ ])
+ module_args = {
+ "use_rest": "never"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_get_versions, 'fail')['msg'] ==\
+ zapi_error_message('Error fetching cluster image details: Fattire__9.3.0')
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_get_for_node, 'fail', 'node')['msg'] ==\
+ zapi_error_message('Error fetching cluster image details for node')
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_package_delete, 'fail')['msg'] ==\
+ zapi_error_message('Error deleting cluster image package for Fattire__9.3.0')
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_packages_get_zapi, 'fail')['msg'] ==\
+ zapi_error_message('Error getting list of local packages')
+
+
+def test_cluster_image_get_for_node_none_none():
+ ''' empty response on get '''
+ register_responses([
+ ('ZAPI', 'cluster-image-get', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.cluster_image_get_for_node('node') == (None, None)
+
+
+def test_cluster_image_package_download():
+ ''' ZAPI download: generic error, package already exists (18408), then a successful download '''
+ register_responses([
+ ('ZAPI', 'cluster-image-package-download', ZRR['error']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['error_18408']),
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['cluster_image_package_local_info']),
+ ('ZAPI', 'cluster-image-package-download', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_package_download, 'fail')['msg'] ==\
+ zapi_error_message('Error downloading cluster image package for abc.com')
+ assert my_obj.cluster_image_package_download()
+ assert not my_obj.cluster_image_package_download()
+
+
+def test_cluster_image_update_progress_get_error():
+ ''' ZAPI error on progress get '''
+ register_responses([
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['error']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['error']),
+ ('ZAPI', 'cluster-image-update-progress-info', ZRR['error']),
+ ])
+ module_args = {
+ "use_rest": "never"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_update_progress_get, 'fail', ignore_connection_error=False)['msg'] ==\
+ zapi_error_message('Error fetching cluster image update progress details')
+ assert my_obj.cluster_image_update_progress_get() == {}
+ assert my_obj.cluster_image_update_progress_get(ignore_connection_error=True) == {}
+
+
+def test_delete_package_zapi():
+ # deleting a package
+ register_responses([
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['cluster_image_package_local_info']),
+ ('ZAPI', 'cluster-image-package-delete', ZRR['success']),
+ # idempotency
+ ('ZAPI', 'cluster-image-package-local-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ "package_version": "Fattire__9.3.0",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+# REST tests
+
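+# REST happy path: version check (GET cluster), list packages, read node versions,
+# POST cluster/software/download (an async job polled via JOB_GET_API), PATCH
+# cluster/software to start the update, poll its state, then DELETE the package.
+# The interleaved 'generic_error' responses suggest that transient polling errors
+# are retried rather than treated as fatal.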
+@patch('time.sleep')
+def test_rest_ensure_apply_for_update_called(dont_sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success_with_job_uuid']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_running']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_running']),
+ ('GET', JOB_GET_API, SRR['generic_error']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_success']),
+ ('PATCH', 'cluster/software', SRR['success_with_job_uuid']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_running']),
+ ('GET', JOB_GET_API, SRR['generic_error']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_running']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_success']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_in_progress']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_in_progress']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_in_progress']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_completed']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ ('DELETE', 'cluster/software/packages/PlinyTheElder', SRR['success_with_job_uuid']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_running']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_running']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_success']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_ensure_apply_for_update_called_idempotent(dont_sleep):
+ # image already installed
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_ensure_apply_for_update_called_with_validation(dont_sleep):
+ # for validation before update
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success']),
+ ('PATCH', 'cluster/software', SRR['success']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ ('PATCH', 'cluster/software', SRR['success']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_in_progress']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_in_progress']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_in_progress']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_completed']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ ('DELETE', 'cluster/software/packages/PlinyTheElder', SRR['success']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "validate_after_download": True,
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_download_idempotent_package_already_exist_pre(dont_sleep):
+ ''' downloading software - package already present before attempting download '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['cluster_software_package_info_pte']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "download_only": True,
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_download_idempotent_package_already_exist_post(dont_sleep):
+ ''' downloading software - package already present when attempting download '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['error_image_already_exists']),
+ ('GET', 'cluster/software/packages', SRR['cluster_software_package_info_pte']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "download_only": True,
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_download_already_in_progress(dont_sleep):
+ ''' downloading software - a download is already in progress when attempting download '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['error_download_in_progress']),
+ ('GET', 'cluster/software/download', SRR['cluster_software_download_state_running']),
+ ('GET', 'cluster/software/download', SRR['generic_error']),
+ ('GET', 'cluster/software/download', SRR['generic_error']),
+ ('GET', 'cluster/software/download', SRR['cluster_software_download_state_running']),
+ ('GET', 'cluster/software/download', SRR['cluster_software_download_state_success']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "download_only": True,
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_negative_download_package_already_exist(dont_sleep):
+ ''' downloading software - ONTAP reports the package already exists, but a different version is found '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['error_image_already_exists']),
+ ('GET', 'cluster/software/packages', SRR['cluster_software_package_info_ft']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "download_only": True,
+ }
+ error = 'Error: another package with the same file name exists: found: Fattire__9.3.0'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_rest_negative_download_error(dont_sleep):
+ ''' downloading software - error while downloading the image - first request '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ }
+ error = rest_error_message('Error downloading software', 'cluster/software/download', ' - current versions:')
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_rest_negative_download_progress_error(dont_sleep):
+ ''' downloading software - error while downloading the image - progress error '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success_with_job_uuid']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_running']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_running']),
+ ('GET', JOB_GET_API, SRR['job_generic_response_failure']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ }
+ error = 'Error downloading software: job reported error: job reported failure, received'
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_rest_negative_update_error_sync(dont_sleep):
+ ''' updating software - error while updating the image '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success']),
+ ('PATCH', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ # second error on validate results
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success']),
+ ('PATCH', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ }
+ error = rest_error_message('Error updating software', 'cluster/software')
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+ assert 'validation results:' in msg
+ assert "'issue': {'message': 'Cluster HA is not configured in the cluster.'}" in msg
+ # second error on validate results
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+ assert 'validation results:' in msg
+ assert 'validation results: Error fetching software information for validation_results:' in msg
+
+
+@patch('time.sleep')
+def test_rest_negative_update_error_waiting_for_state(dont_sleep):
+ ''' updating software - error while updating the image '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success']),
+ ('PATCH', 'cluster/software', SRR['success']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_in_progress']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ # over 20 consecutive errors
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success']),
+ ('PATCH', 'cluster/software', SRR['success']),
+ ('GET', 'cluster/software', SRR['cluster_software_state_in_progress']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "timeout": 240
+ }
+ error = rest_error_message('Error: unable to read image update state, using timeout 240. '
+ 'Last error: Error fetching software information for state', 'cluster/software')
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+ assert 'All errors:' in msg
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "timeout": 1800
+ }
+ # stop after 20 errors
+ error = rest_error_message('Error: unable to read image update state, using timeout 1800. '
+ 'Last error: Error fetching software information for state', 'cluster/software')
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+ assert 'All errors:' in msg
+
+
+@patch('time.sleep')
+def test_rest_negative_update_error_job_errors(dont_sleep):
+ ''' updating software - error while updating the image '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success']),
+ ('PATCH', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ # second error on validate results
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('POST', 'cluster/software/download', SRR['success']),
+ ('PATCH', 'cluster/software', SRR['generic_error']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ }
+ error = rest_error_message('Error updating software', 'cluster/software')
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+ assert 'validation results:' in msg
+ assert "'issue': {'message': 'Cluster HA is not configured in the cluster.'}" in msg
+ # second error on validate results
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+ assert 'validation results:' in msg
+ assert 'validation results: Error fetching software information for validation_results:' in msg
+
+
+def test_rest_is_update_required():
+ ''' update is required if nodes have different images, or version does not match '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info_mixed']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info_mixed']),
+ ])
+ module_args = {
+ "use_rest": "always"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert not my_obj.is_update_required()
+ assert my_obj.is_update_required()
+ my_obj.parameters["package_version"] = "PlinyTheElder"
+ assert my_obj.is_update_required()
+ assert my_obj.is_update_required()
+
+
+@patch('time.sleep')
+def test_rest_cluster_image_validate(dont_sleep):
+ ''' check error, then check that reports are read correctly '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('PATCH', 'cluster/software', SRR['generic_error']),
+ ('PATCH', 'cluster/software', SRR['success']),
+ ('GET', 'cluster/software', SRR['zero_records']), # retried as validation_results is not present - empty record
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']), # retried as validation_results is not present - other keys
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ ])
+ module_args = {
+ "use_rest": "always"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.cluster_image_validate() == rest_error_message('Error validating software', 'cluster/software')
+ reports = my_obj.cluster_image_validate()
+ assert 'action' in reports[0]
+ assert 'issue' in reports[0]
+
+
+def test_rest_cluster_image_errors():
+ ''' REST error on get and delete '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software', SRR['generic_error']),
+ ('DELETE', 'cluster/software/packages/Fattire__9.3.0', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_get_versions, 'fail')['msg'] ==\
+ rest_error_message('Error fetching software information for nodes', 'cluster/software')
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_package_delete, 'fail')['msg'] ==\
+ rest_error_message('Error deleting cluster software package for Fattire__9.3.0', 'cluster/software/packages/Fattire__9.3.0')
+
+
+def test_rest_cluster_image_get_for_node_versions():
+ ''' getting nodes versions '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ('GET', 'cluster/software', SRR['cluster_software_node_info']),
+ ])
+ module_args = {
+ "use_rest": "always"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.cluster_image_get_rest('versions') == [('node1', 'Fattire__9.3.0'), ('node2', 'Fattire__9.3.0')]
+ my_obj.parameters['nodes'] = ['node1']
+ assert my_obj.cluster_image_get_rest('versions') == [('node1', 'Fattire__9.3.0')]
+ my_obj.parameters['nodes'] = ['node2']
+ assert my_obj.cluster_image_get_rest('versions') == [('node2', 'Fattire__9.3.0')]
+ my_obj.parameters['nodes'] = ['node2', 'node3']
+ error = 'Error: node not found in cluster: node3.'
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_get_rest, 'fail', 'versions')['msg'] == error
+ my_obj.parameters['nodes'] = ['node4', 'node3']
+ error = 'Error: nodes not found in cluster: node4, node3.'
+ assert expect_and_capture_ansible_exception(my_obj.cluster_image_get_rest, 'fail', 'versions')['msg'] == error
+
+
+def test_rest_negative_cluster_image_get_for_node_versions():
+ ''' errors when getting nodes versions '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software', SRR['zero_records']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ ])
+ module_args = {
+ "use_rest": "always"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "Error fetching software information for nodes: no record calling cluster/software"
+ assert error in expect_and_capture_ansible_exception(my_obj.cluster_image_get_rest, 'fail', 'versions')['msg']
+ error = "Unexpected results for what: versions, record: {'validation_results':"
+ assert error in expect_and_capture_ansible_exception(my_obj.cluster_image_get_rest, 'fail', 'versions')['msg']
+
+
+def test_rest_cluster_image_package_download():
+ ''' download error, download error indicating package exists, successful download '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('POST', 'cluster/software/download', SRR['generic_error']),
+ ('POST', 'cluster/software/download', SRR['error_image_already_exists']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ('POST', 'cluster/software/download', SRR['success']),
+ ])
+ module_args = {
+ "use_rest": "always"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = rest_error_message('Error downloading software', 'cluster/software/download', " - current versions: ['not available with force_update']")
+ assert error in expect_and_capture_ansible_exception(my_obj.download_software_rest, 'fail')['msg']
+ error = 'Error: ONTAP reported package already exists, but no package found: '
+ assert error in expect_and_capture_ansible_exception(my_obj.download_software_rest, 'fail')['msg']
+ assert not my_obj.download_software_rest()
+
+
+def test_rest_post_update_tasks():
+ ''' validate success and error messages '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ ('DELETE', 'cluster/software/packages/Fattire__9.3.0', SRR['success']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ ('GET', 'cluster/software', SRR['cluster_software_validation_results']),
+ ])
+ module_args = {
+ "use_rest": "always"
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.post_update_tasks_rest('completed') == cluster_software_validation_results['validation_results']
+ # time out
+ error = 'Timeout error updating image using REST: state: in_progress.'
+ assert error in expect_and_capture_ansible_exception(my_obj.post_update_tasks_rest, 'fail', 'in_progress')['msg']
+ # other state
+ error = 'Error updating image using REST: state: error_state.'
+ assert error in expect_and_capture_ansible_exception(my_obj.post_update_tasks_rest, 'fail', 'error_state')['msg']
+
+
+def test_rest_delete_package():
+ ''' deleting package '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['cluster_software_package_info_pte']),
+ ('DELETE', 'cluster/software/packages/PlinyTheElder', SRR['success']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['cluster_software_package_info_ft']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "state": "absent",
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_negative_delete_package():
+ ''' errors when deleting package '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['generic_error']),
+ # idempotency
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster/software/packages', SRR['cluster_software_package_info_pte']),
+ ('DELETE', 'cluster/software/packages/PlinyTheElder', SRR['generic_error'])
+ ])
+ module_args = {
+ "use_rest": "always",
+ "package_version": "PlinyTheElder",
+ "state": "absent",
+ }
+ error = rest_error_message('Error: unable to fetch local package list', 'cluster/software/packages')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ error = rest_error_message('Error deleting cluster software package for PlinyTheElder', 'cluster/software/packages/PlinyTheElder')
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_partially_supported_options():
+ ''' minimum ONTAP version checks for stabilize_minutes and nodes options '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ error = 'Minimum version of ONTAP for stabilize_minutes is (9, 8)'
+ assert error in create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert create_module(my_module, DEFAULT_ARGS, module_args)
+ module_args = {
+ "use_rest": "always",
+ "nodes": "node1"
+ }
+ error = 'Minimum version of ONTAP for nodes is (9, 9)'
+ assert error in create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args = {
+ "use_rest": "auto",
+ "nodes": "node1"
+ }
+ assert create_module(my_module, DEFAULT_ARGS, module_args)
+ print_warnings()
+ assert_warning_was_raised('Falling back to ZAPI because of unsupported option(s) or option value(s) "nodes" in REST require (9, 9)')
+
+
+def test_missing_arg():
+ args = dict(DEFAULT_ARGS)
+ args.pop('package_url')
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster/software/packages', SRR['zero_records']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ }
+ error = 'Error: packague_url is a required parameter to download the software package.'
+ assert error in call_main(my_main, args, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_storage_auto_giveback.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_storage_auto_giveback.py
new file mode 100644
index 000000000..3c6d345c1
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_storage_auto_giveback.py
@@ -0,0 +1,320 @@
+''' unit tests ONTAP Ansible module: na_ontap_storage_auto_giveback '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch, Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_storage_auto_giveback \
+ import NetAppOntapStorageAutoGiveback as storage_auto_giveback_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Ooops, the UT needs one more SRR response"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'storage_auto_giveback_enabled_record': (200, {
+ 'num_records': 1,
+ 'records': [{
+ 'node': 'node1',
+ 'auto_giveback': True,
+ 'auto_giveback_after_panic': True
+ }]
+ }, None),
+ 'storage_auto_giveback_disabled_record': (200, {
+ 'num_records': 1,
+ "records": [{
+ 'node': 'node1',
+ 'auto_giveback': False,
+ 'auto_giveback_after_panic': False
+ }]
+ }, None)
+}
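+# Each SRR entry is a (status_code, json_body, error) tuple; the REST tests below
+# feed them to the mocked send_request through side_effect, consuming them in order,
+# with 'end_of_sequence' flagging any unexpected extra request.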
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'auto_giveback_enabled':
+ xml = self.build_storage_auto_giveback_enabled_info()
+ elif self.type == 'auto_giveback_disabled':
+ xml = self.build_storage_auto_giveback_disabled_info()
+ elif self.type == 'auto_giveback_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_storage_auto_giveback_enabled_info():
+ ''' build xml data for cf-get-iter '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'storage-failover-info': {
+ 'sfo-node-info': {
+ 'node-related-info': {
+ 'node': 'node1'
+ }
+ },
+ 'sfo-options-info': {
+ 'options-related-info': {
+ 'auto-giveback-enabled': 'true',
+ 'sfo-giveback-options-info': {
+ 'giveback-options': {
+ 'auto-giveback-after-panic-enabled': 'true'
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_storage_auto_giveback_disabled_info():
+ ''' build xml data for cf-get-iter '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'storage-failover-info': {
+ 'sfo-node-info': {
+ 'node-related-info': {
+ 'node': 'node1'
+ }
+ },
+ 'sfo-options-info': {
+ 'options-related-info': {
+ 'auto-giveback-enabled': 'false',
+ 'sfo-giveback-options-info': {
+ 'giveback-options': {
+ 'auto-giveback-after-panic-enabled': 'false'
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
+
+ def set_default_args(self, use_rest=None):
+ if self.onbox:
+ hostname = '10.10.10.10'
+ username = 'username'
+ password = 'password'
+ name = 'node1'
+ auto_giveback_enabled = True
+ auto_giveback_after_panic_enabled = True
+ else:
+ hostname = '10.10.10.10'
+ username = 'username'
+ password = 'password'
+ name = 'node1'
+ auto_giveback_enabled = True
+ auto_giveback_after_panic_enabled = True
+
+ args = dict({
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'name': name,
+ 'auto_giveback_enabled': auto_giveback_enabled,
+ 'auto_giveback_after_panic_enabled': auto_giveback_after_panic_enabled
+ })
+
+ if use_rest is not None:
+ args['use_rest'] = use_rest
+
+ return args
+
+ @staticmethod
+ def get_storage_auto_giveback_mock_object(cx_type='zapi', kind=None):
+ storage_auto_giveback_obj = storage_auto_giveback_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ storage_auto_giveback_obj.server = MockONTAPConnection()
+ else:
+ storage_auto_giveback_obj.server = MockONTAPConnection(kind=kind)
+ return storage_auto_giveback_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ storage_auto_giveback_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_storage_auto_giveback for existing config '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = storage_auto_giveback_module()
+ my_obj.server = MockONTAPConnection(kind='auto_giveback_enabled')
+ assert my_obj.get_storage_auto_giveback()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_storage_auto_giveback.NetAppOntapStorageAutoGiveback.modify_storage_auto_giveback')
+ def test_successful_enable(self, modify_storage_auto_giveback):
+ ''' enable storage_auto_giveback and testing idempotency '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = storage_auto_giveback_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('auto_giveback_disabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ modify_storage_auto_giveback.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = storage_auto_giveback_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('auto_giveback_enabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_storage_auto_giveback.NetAppOntapStorageAutoGiveback.modify_storage_auto_giveback')
+ def test_successful_disable(self, modify_storage_auto_giveback):
+ ''' disable storage_auto_giveback and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['auto_giveback_enabled'] = False
+ data['auto_giveback_after_panic_enabled'] = False
+ set_module_args(data)
+ my_obj = storage_auto_giveback_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('auto_giveback_enabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ # modify_storage_auto_giveback.assert_called_with()
+ # to reset na_helper from remembering the previous 'changed' value
+ data = self.set_default_args(use_rest='Never')
+ data['auto_giveback_enabled'] = False
+ data['auto_giveback_after_panic_enabled'] = False
+ set_module_args(data)
+ my_obj = storage_auto_giveback_module()
+ my_obj.ems_log_event = Mock(return_value=None)
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('auto_giveback_disabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ data = self.set_default_args(use_rest='Never')
+ set_module_args(data)
+ my_obj = storage_auto_giveback_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('auto_giveback_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_storage_auto_giveback()
+ assert 'Error modifying auto giveback' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_storage_auto_giveback_mock_object(cx_type='rest').apply()
+ assert SRR['generic_error'][2] in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_enabled_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['storage_auto_giveback_disabled_record'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_storage_auto_giveback_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_enabled_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['storage_auto_giveback_enabled_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_storage_auto_giveback_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_disabled_rest(self, mock_request):
+ data = self.set_default_args()
+ data['auto_giveback_enabled'] = False
+ data['auto_giveback_after_panic_enabled'] = False
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['storage_auto_giveback_enabled_record'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_storage_auto_giveback_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_disabled_rest(self, mock_request):
+ data = self.set_default_args()
+ data['auto_giveback_enabled'] = False
+ data['auto_giveback_after_panic_enabled'] = False
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['storage_auto_giveback_disabled_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_storage_auto_giveback_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_storage_failover.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_storage_failover.py
new file mode 100644
index 000000000..aa0b7703e
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_storage_failover.py
@@ -0,0 +1,350 @@
+''' unit tests for ONTAP Ansible module: na_ontap_storage_failover '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_storage_failover \
+ import NetAppOntapStorageFailover as storage_failover_module # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
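+# Each entry below is a (status_code, json_body, error_message) tuple; the tests feed them, in order,
+# to the mocked send_request via side_effect.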
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'no_records': (200, {'records': []}, None),
+ 'end_of_sequence': (500, None, "Ooops, the UT needs one more SRR response"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'storage_failover_enabled_record': (200, {
+ 'num_records': 1,
+ 'records': [{
+ 'name': 'node1',
+ 'uuid': '56ab5d21-312a-11e8-9166-9d4fc452db4e',
+ 'ha': {
+ 'enabled': True
+ }
+ }]
+ }, None),
+ 'storage_failover_disabled_record': (200, {
+ 'num_records': 1,
+ "records": [{
+ 'name': 'node1',
+ 'uuid': '56ab5d21-312a-11e8-9166-9d4fc452db4e',
+ 'ha': {
+ 'enabled': False
+ }
+ }]
+ }, None),
+ 'no_ha_record': (200, {
+ 'num_records': 1,
+ "records": [{
+ 'name': 'node1',
+ 'uuid': '56ab5d21-312a-11e8-9166-9d4fc452db4e',
+ }]
+ }, None)
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None):
+ ''' save arguments '''
+ self.type = kind
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'storage_failover_enabled':
+ xml = self.build_storage_failover_enabled_info()
+ elif self.type == 'storage_failover_disabled':
+ xml = self.build_storage_failover_disabled_info()
+ elif self.type == 'storage_failover_fail':
+ raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_storage_failover_enabled_info():
+ ''' build xml data for cf-status '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'is-enabled': 'true'
+ }
+
+ xml.translate_struct(data)
+ return xml
+
+ @staticmethod
+ def build_storage_failover_disabled_info():
+ ''' build xml data for cf-status '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ data = {
+ 'is-enabled': 'false'
+ }
+
+ xml.translate_struct(data)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+ self.onbox = False
+
+ def set_default_args(self, use_rest=None):
+ hostname = '10.10.10.10'
+ username = 'username'
+ password = 'password'
+ node_name = 'node1'
+
+ args = dict({
+ 'state': 'present',
+ 'hostname': hostname,
+ 'username': username,
+ 'password': password,
+ 'node_name': node_name
+ })
+
+ if use_rest is not None:
+ args['use_rest'] = use_rest
+
+ return args
+
+ @staticmethod
+ def get_storage_failover_mock_object(cx_type='zapi', kind=None):
+ storage_failover_obj = storage_failover_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ storage_failover_obj.server = MockONTAPConnection()
+ else:
+ storage_failover_obj.server = MockONTAPConnection(kind=kind)
+ return storage_failover_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ storage_failover_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_get_called_existing(self):
+ ''' test get_storage_failover for existing config '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = storage_failover_module()
+ my_obj.server = MockONTAPConnection(kind='storage_failover_enabled')
+ assert my_obj.get_storage_failover()
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_storage_failover.NetAppOntapStorageFailover.modify_storage_failover')
+ def test_successful_enable(self, modify_storage_failover):
+ ''' enable storage_failover and testing idempotency '''
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = storage_failover_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('storage_failover_disabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ modify_storage_failover.assert_called_with({'is_enabled': False})
+ # to reset na_helper from remembering the previous 'changed' value
+ set_module_args(self.set_default_args(use_rest='Never'))
+ my_obj = storage_failover_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('storage_failover_enabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_storage_failover.NetAppOntapStorageFailover.modify_storage_failover')
+ def test_successful_disable(self, modify_storage_failover):
+ ''' disable storage_failover and testing idempotency '''
+ data = self.set_default_args(use_rest='Never')
+ data['state'] = 'absent'
+ set_module_args(data)
+ my_obj = storage_failover_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('storage_failover_enabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ modify_storage_failover.assert_called_with({'is_enabled': True})
+ # to reset na_helper from remembering the previous 'changed' value
+ my_obj = storage_failover_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('storage_failover_disabled')
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_if_all_methods_catch_exception(self):
+ data = self.set_default_args(use_rest='Never')
+ set_module_args(data)
+ my_obj = storage_failover_module()
+ if not self.onbox:
+ my_obj.server = MockONTAPConnection('storage_failover_fail')
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_obj.modify_storage_failover(self.get_storage_failover_mock_object())
+ assert 'Error modifying storage failover' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+ def test_negative_no_netapp_lib(self, mock_has_netapp_lib):
+ data = self.set_default_args(use_rest='Never')
+ set_module_args(data)
+ mock_has_netapp_lib.return_value = False
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert 'Error: the python NetApp-Lib module is required.' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert SRR['generic_error'][2] in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_enabled_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['storage_failover_disabled_record'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_enabled_rest(self, mock_request):
+ data = self.set_default_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['storage_failover_enabled_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_successful_disabled_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['storage_failover_enabled_record'], # get
+ SRR['empty_good'], # patch
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_idempotent_disabled_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['storage_failover_disabled_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_negative_no_ha_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'present'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_ha_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert 'HA is not available on node: node1' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_negative_node_not_found_rest(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_records'],
+ SRR['storage_failover_disabled_record'], # get
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert 'REST API did not return failover details for node' in exc.value.args[0]['msg']
+ assert 'current nodes: node1' in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_negative_node_not_found_rest_no_names(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_records'],
+ SRR['no_records'], # get all nodes
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert 'REST API did not return failover details for node' in exc.value.args[0]['msg']
+ assert 'current nodes: node1' not in exc.value.args[0]['msg']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_negative_node_not_found_rest_error_on_get_nodes(self, mock_request):
+ data = self.set_default_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['no_records'],
+ SRR['generic_error'], # get all nodes
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_storage_failover_mock_object(cx_type='rest').apply()
+ assert 'REST API did not return failover details for node' in exc.value.args[0]['msg']
+ assert 'current nodes: node1' not in exc.value.args[0]['msg']
+ assert 'failed to get list of nodes' in exc.value.args[0]['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_svm.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_svm.py
new file mode 100644
index 000000000..d18d32a57
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_svm.py
@@ -0,0 +1,1251 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_svm '''
+
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ assert_warning_was_raised, call_main, clear_warnings, create_and_apply, create_module, expect_and_capture_ansible_exception, patch_ansible, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_svm \
+ import NetAppOntapSVM as svm_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+# REST API canned responses when mocking send_request
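+# svm_info below is the canned GET svm/svms record used, directly and via its certificate variants,
+# in the SRR table built with rest_responses() further down.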
+
+svm_info = {
+ "uuid": "09e9fd5e-8ebd-11e9-b162-005056b39fe7",
+ "name": "test_svm",
+ "state": "running",
+ "subtype": "default",
+ "language": "c.utf_8",
+ "aggregates": [{"name": "aggr_1",
+ "uuid": "850dd65b-8811-4611-ac8c-6f6240475ff9"},
+ {"name": "aggr_2",
+ "uuid": "850dd65b-8811-4611-ac8c-6f6240475ff9"}],
+ "comment": "new comment",
+ "ipspace": {"name": "ansible_ipspace",
+ "uuid": "2b760d31-8dfd-11e9-b162-005056b39fe7"},
+ "snapshot_policy": {"uuid": "3b611707-8dfd-11e9-b162-005056b39fe7",
+ "name": "old_snapshot_policy"},
+ "nfs": {"enabled": True, "allowed": True},
+ "cifs": {"enabled": False},
+ "iscsi": {"enabled": False},
+ "fcp": {"enabled": False},
+ "nvme": {"enabled": False},
+ 'max_volumes': 3333
+}
+
+svm_info_cert1 = dict(svm_info)
+svm_info_cert1['certificate'] = {'name': 'cert_1', 'uuid': 'cert_uuid_1'}
+svm_info_cert2 = dict(svm_info)
+svm_info_cert2['certificate'] = {'name': 'cert_2', 'uuid': 'cert_uuid_2'}
+
+SRR = rest_responses({
+ 'svm_record': (200, {'records': [svm_info]}, None),
+ 'svm_record_cert1': (200, {'records': [svm_info_cert1]}, None),
+ 'svm_record_cert2': (200, {'records': [svm_info_cert2]}, None),
+ 'svm_record_ap': (200,
+ {'records': [{"name": "test_svm",
+ "state": "running",
+ "aggregates": [{"name": "aggr_1",
+ "uuid": "850dd65b-8811-4611-ac8c-6f6240475ff9"},
+ {"name": "aggr_2",
+ "uuid": "850dd65b-8811-4611-ac8c-6f6240475ff9"}],
+ "ipspace": {"name": "ansible_ipspace",
+ "uuid": "2b760d31-8dfd-11e9-b162-005056b39fe7"},
+ "snapshot_policy": {"uuid": "3b611707-8dfd-11e9-b162-005056b39fe7",
+ "name": "old_snapshot_policy"},
+ "nfs": {"enabled": False},
+ "cifs": {"enabled": True, "allowed": True},
+ "iscsi": {"enabled": True, "allowed": True},
+ "fcp": {"enabled": False},
+ "nvme": {"enabled": False},
+ "language": "de.utf_8",
+ "uuid": "svm_uuid"
+ }]}, None),
+ 'cli_record': (200,
+ {'records': [{"max_volumes": 100, "allowed_protocols": ['nfs', 'iscsi']}]}, None),
+ 'certificate_record_1': (200,
+ {'records': [{"name": "cert_1",
+ "uuid": "cert_uuid_1"}]}, None),
+ 'certificate_record_2': (200,
+ {'records': [{"name": "cert_2",
+ "uuid": "cert_uuid_2"}]}, None),
+ 'svm_web_record_1': (200, {
+ 'records': [{
+ 'certificate': {
+ "uuid": "cert_uuid_1"
+ },
+ 'client_enabled': False,
+ 'ocsp_enabled': False,
+ }]}, None),
+ 'svm_web_record_2': (200, {
+ 'records': [{
+ 'certificate': {
+ "uuid": "cert_uuid_2"
+ },
+ 'client_enabled': True,
+ 'ocsp_enabled': True,
+ }]}, None)
+}, False)
+
+DEFAULT_ARGS = {
+ 'name': 'test_svm',
+ 'aggr_list': 'aggr_1,aggr_2',
+ 'ipspace': 'ansible_ipspace',
+ 'comment': 'new comment',
+ 'subtype': 'default',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+}
+
+vserver_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'vserver-info': {
+ 'vserver-name': 'test_svm',
+ 'ipspace': 'ansible_ipspace',
+ 'root-volume': 'ansible_vol',
+ 'root-volume-aggregate': 'ansible_aggr',
+ 'language': 'c.utf_8',
+ 'comment': 'new comment',
+ 'snapshot-policy': 'old_snapshot_policy',
+ 'vserver-subtype': 'default',
+ 'allowed-protocols': [{'protocol': 'nfs'}, {'protocol': 'cifs'}],
+ 'aggr-list': [{'aggr-name': 'aggr_1'}, {'aggr-name': 'aggr_2'}],
+ }}}
+
+
+ZRR = zapi_responses({
+ 'svm_record': build_zapi_response(vserver_info)
+})
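+# build_zapi_response turns the vserver_info dict above into a canned ZAPI response, so the
+# ('ZAPI', 'vserver-get-iter', ZRR['svm_record']) entries registered in the tests below return it
+# in place of a live invoke_successfully() call.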
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ error = create_module(svm_module, {}, fail=True)['msg']
+ assert 'missing required arguments:' in error
+ assert 'hostname' in error
+ assert 'name' in error
+
+
+def test_error_missing_name():
+ ''' Test if create throws an error if name is not specified'''
+ register_responses([
+ ])
+ args = dict(DEFAULT_ARGS)
+ args.pop('name')
+ assert create_module(svm_module, args, fail=True)['msg'] == 'missing required arguments: name'
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_error_missing_netapp_lib(mock_has_netapp_lib):
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ])
+ mock_has_netapp_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg == create_module(svm_module, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_successful_create_zapi():
+ '''Test successful create'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-create', ZRR['success']),
+ ('ZAPI', 'vserver-modify', ZRR['success']),
+ ])
+ assert create_and_apply(svm_module, DEFAULT_ARGS)['changed']
+
+
+def test_create_idempotency():
+ '''Test create idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ])
+ assert not create_and_apply(svm_module, DEFAULT_ARGS)['changed']
+
+
+def test_create_error():
+ '''Test create error'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-create', ZRR['error']),
+ ])
+ msg = 'Error provisioning SVM test_svm: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, fail=True)['msg'] == msg
+
+
+def test_successful_delete():
+ '''Test successful delete'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-destroy', ZRR['success']),
+ ])
+ _modify_options_with_expected_change('state', 'absent')
+
+
+def test_error_delete():
+ '''Test delete with ZAPI error
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-destroy', ZRR['error']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ msg = 'Error deleting SVM test_svm: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_delete_idempotency():
+ '''Test delete idempotency '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ assert not create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_init():
+ '''Validate that:
+ admin_state is ignored with ZAPI
+ language is set to lower case for C.UTF-8
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'language': 'C.uTf-8'
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ assert my_obj.parameters['language'] == 'c.utf_8'
+ assert_warning_was_raised('admin_state is ignored when ZAPI is used.')
+
+
+def test_init_error():
+ '''Validate that:
+ a disallowed protocol raises an error
+ services is not supported with ZAPI
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ module_args = {
+ 'allowed_protocols': 'dummy,humpty,dumpty,cifs,nfs',
+ }
+ error = create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Unexpected value dummy in allowed_protocols.' in error
+ assert 'Unexpected value humpty in allowed_protocols.' in error
+ assert 'Unexpected value dumpty in allowed_protocols.' in error
+ assert 'cifs' not in error
+ assert 'nfs' not in error
+
+ module_args = {
+ 'services': {},
+ }
+ error = create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error == 'using services requires ONTAP 9.6 or later and REST must be enabled - Unreachable - using ZAPI.'
+ module_args = {
+ 'services': {'ndmp': {'allowed': True}},
+ }
+ error = create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error == 'using ndmp requires ONTAP 9.7 or later and REST must be enabled - ONTAP version: 9.6.0 - using REST.'
+
+
+def test_successful_rename():
+ '''Test successful rename'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-rename', ZRR['success']),
+ ])
+ module_args = {
+ 'from_name': 'test_svm',
+ 'name': 'test_new_svm',
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_rename_no_from():
+ '''Test error rename'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'from_name': 'test_svm',
+ 'name': 'test_new_svm',
+ }
+ msg = 'Error renaming SVM test_new_svm: no SVM with from_name test_svm.'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_error_rename_zapi():
+ '''Test error rename'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-rename', ZRR['error']),
+ ])
+ module_args = {
+ 'from_name': 'test_svm',
+ 'name': 'test_new_svm',
+ }
+ msg = 'Error renaming SVM test_svm: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_successful_modify_language():
+ '''Test successful modify language'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-modify', ZRR['success']),
+ ])
+ _modify_options_with_expected_change('language', 'c')
+
+
+def test_error_modify_language():
+ '''Test error modify language'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-modify', ZRR['error']),
+ ])
+ module_args = {
+ 'language': 'c',
+ }
+ msg = 'Error modifying SVM test_svm: NetApp API failed. Reason - 12345:synthetic error for UT purpose'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_error_modify_fixed_properties():
+ '''Test error modifying a fixed property'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ])
+ module_args = {
+ 'ipspace': 'new',
+ }
+ msg = 'Error modifying SVM test_svm: cannot modify ipspace - current: ansible_ipspace - desired: new.'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+ module_args = {
+ 'ipspace': 'new',
+ 'root_volume': 'new_root'
+ }
+ msg = 'Error modifying SVM test_svm: cannot modify root_volume - current: ansible_vol - desired: new_root, '\
+ 'ipspace - current: ansible_ipspace - desired: new.'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_successful_modify_snapshot_policy():
+ '''Test successful modify snapshot policy'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-modify', ZRR['success']),
+ ])
+ _modify_options_with_expected_change(
+ 'snapshot_policy', 'new_snapshot_policy'
+ )
+
+
+def test_successful_modify_allowed_protocols():
+ '''Test successful modify allowed protocols'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-modify', ZRR['success']),
+ ])
+ _modify_options_with_expected_change(
+ 'allowed_protocols', 'nvme,fcp'
+ )
+
+
+def test_successful_modify_aggr_list():
+ '''Test successful modify aggr-list'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-modify', ZRR['success']),
+ ])
+ _modify_options_with_expected_change(
+ 'aggr_list', 'aggr_3,aggr_4'
+ )
+
+
+def test_successful_modify_aggr_list_star():
+ '''Test successful modify aggr-list with star'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ('ZAPI', 'vserver-get-iter', ZRR['svm_record']),
+ ('ZAPI', 'vserver-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'aggr_list': '*'
+ }
+ results = create_and_apply(svm_module, DEFAULT_ARGS, module_args)
+ assert results['changed']
+ assert_warning_was_raised("na_ontap_svm: changed always 'True' when aggr_list is '*'.")
+
+
+def _modify_options_with_expected_change(arg0, arg1):
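+ '''helper: apply the module with a single option set to the given value and assert changed'''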
+ module_args = {
+ arg0: arg1,
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ])
+ module_args = {
+ 'root_volume': 'whatever',
+ 'aggr_list': '*',
+ 'ignore_rest_unsupported_options': 'true',
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == 'calling: svm/svms: got Expected error.'
+
+
+def test_rest_error_unsupported_parm():
+ register_responses([
+ ])
+ module_args = {
+ 'root_volume': 'not_supported_by_rest',
+ 'use_rest': 'always',
+ }
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == "REST API currently does not support 'root_volume'"
+
+
+def test_rest_successfully_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['success']),
+ ])
+ assert create_and_apply(svm_module, DEFAULT_ARGS)['changed']
+
+
+def test_rest_error_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['generic_error']),
+ ])
+ msg = 'Error in create: calling: svm/svms: got Expected error.'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, fail=True)['msg'] == msg
+
+
+def test_rest_create_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ])
+ module_args = {
+ 'root_volume': 'whatever',
+ 'aggr_list': '*',
+ 'ignore_rest_unsupported_options': 'true',
+ }
+ assert not create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successful_delete():
+ '''Test successful delete'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('DELETE', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['success']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_delete():
+ '''Test error delete'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('DELETE', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['generic_error']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ msg = 'Error in delete: calling: svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7: got Expected error.'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_error_delete_no_svm():
+ '''Test delete error when no SVM object is passed to delete_vserver'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ])
+ my_obj = create_module(svm_module, DEFAULT_ARGS)
+ msg = 'Internal error, expecting SVM object in delete'
+ assert expect_and_capture_ansible_exception(my_obj.delete_vserver, 'fail')['msg'] == msg
+
+
+def test_rest_delete_idempotency():
+ '''Test delete idempotency'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ assert not create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successful_rename():
+ '''Test successful rename'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['success']),
+ ])
+ module_args = {
+ 'from_name': 'test_svm',
+ 'name': 'test_new_svm',
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successful_modify_language():
+ '''Test successful modify language'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['success']),
+ ])
+ module_args = {
+ 'language': 'c',
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successful_get():
+ '''Test successful get'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'svm/svms', SRR['svm_record_ap']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'language': 'c'
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ current = my_obj.get_vserver()
+ print(current)
+ assert current['services']['nfs']['allowed']
+ assert not current['services']['cifs']['enabled']
+ current = my_obj.get_vserver()
+ print(current)
+ assert not current['services']['nfs']['enabled']
+ assert current['services']['cifs']['allowed']
+ assert current['services']['iscsi']['allowed']
+
+
+def test_rest_successfully_create_ignore_zapi_option():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['success']),
+ ])
+ module_args = {
+ 'root_volume': 'whatever',
+ 'aggr_list': '*',
+ 'ignore_rest_unsupported_options': 'true',
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_create_with_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['success']),
+ ])
+ module_args = {
+ 'services': {'nfs': {'allowed': True, 'enabled': True}, 'fcp': {'allowed': True, 'enabled': True}}
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_modify_with_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['success']),
+ ('POST', 'protocols/san/fcp/services', SRR['success']),
+ ])
+ module_args = {
+ 'admin_state': 'stopped',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}, 'fcp': {'allowed': True, 'enabled': True}}
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_enable_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('POST', 'protocols/san/fcp/services', SRR['success']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ modify = {'services': {'nfs': {'allowed': True}, 'fcp': {'enabled': True}}}
+ current = {'services': {'nfs': {'allowed': True}}, 'uuid': 'uuid'}
+ assert my_obj.modify_services(modify, current) is None
+
+
+def test_rest_successfully_reenable_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('PATCH', 'protocols/san/fcp/services/uuid', SRR['success']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ modify = {'services': {'nfs': {'allowed': True}, 'fcp': {'enabled': True}}}
+ fcp_dict = {'_links': {'self': {'href': 'fcp_link'}}}
+ current = {'services': {'nfs': {'allowed': True}}, 'uuid': 'uuid', 'fcp': fcp_dict}
+ assert my_obj.modify_services(modify, current) is None
+
+
+def test_rest_negative_enable_service():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ modify = {'services': {'nfs': {'allowed': True}, 'bad_value': {'enabled': True}}, 'name': 'new_name'}
+ current = {'services': {'nfs': {'allowed': True}}, 'uuid': 'uuid'}
+ error = expect_and_capture_ansible_exception(my_obj.modify_services, 'fail', modify, current)['msg']
+ assert error == 'Internal error, unexpecting service: bad_value.'
+
+
+def test_rest_negative_modify_services():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('POST', 'protocols/san/fcp/services', SRR['generic_error']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ modify = {'services': {'nfs': {'allowed': True}, 'fcp': {'enabled': True}}, 'name': 'new_name'}
+ current = {'services': {'nfs': {'allowed': True}}, 'uuid': 'uuid'}
+ error = expect_and_capture_ansible_exception(my_obj.modify_services, 'fail', modify, current)['msg']
+ assert error == 'Error in modify service for fcp: calling: protocols/san/fcp/services: got Expected error.'
+
+
+def test_rest_negative_modify_current_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ modify = {'enabled_protocols': ['nfs', 'fcp']}
+ current = None
+ error = expect_and_capture_ansible_exception(my_obj.modify_vserver, 'fail', modify, current)['msg']
+ assert error == 'Internal error, expecting SVM object in modify.'
+
+
+def test_rest_negative_modify_modify_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ modify = {}
+ current = {'enabled_protocols': ['nfs'], 'disabled_protocols': ['fcp', 'iscsi', 'nvme'], 'uuid': 'uuid'}
+ error = expect_and_capture_ansible_exception(my_obj.modify_vserver, 'fail', modify, current)['msg']
+ assert error == 'Internal error, expecting something to modify in modify.'
+
+
+def test_rest_negative_modify_error_1():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('PATCH', 'svm/svms/uuid', SRR['generic_error']), # rename
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'language': 'klingon',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ modify = {'enabled_protocols': ['nfs', 'fcp'], 'name': 'new_name', 'language': 'klingon'}
+ current = {'enabled_protocols': ['nfs'], 'disabled_protocols': ['fcp', 'iscsi', 'nvme'], 'uuid': 'uuid'}
+ error = expect_and_capture_ansible_exception(my_obj.modify_vserver, 'fail', modify, current)['msg']
+ assert error == 'Error in rename: calling: svm/svms/uuid: got Expected error.'
+
+
+def test_rest_negative_modify_error_2():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('PATCH', 'svm/svms/uuid', SRR['success']), # rename
+ ('PATCH', 'svm/svms/uuid', SRR['generic_error']), # modify
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'language': 'klingon',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ modify = {'enabled_protocols': ['nfs', 'fcp'], 'name': 'new_name', 'language': 'klingon'}
+ current = {'enabled_protocols': ['nfs'], 'disabled_protocols': ['fcp', 'iscsi', 'nvme'], 'uuid': 'uuid'}
+ error = expect_and_capture_ansible_exception(my_obj.modify_vserver, 'fail', modify, current)['msg']
+ assert error == 'Error in modify: calling: svm/svms/uuid: got Expected error.'
+
+
+def test_rest_successfully_get_older_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']), # get protocols
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ assert not create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_add_protocols_on_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['success']),
+ ('PATCH', 'private/cli/vserver/add-protocols', SRR['success']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_add_remove_protocols_on_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']), # get protocols
+ ('PATCH', 'private/cli/vserver/add-protocols', SRR['success']),
+ ('PATCH', 'private/cli/vserver/remove-protocols', SRR['success'])
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}, 'iscsi': {'allowed': False}, 'fcp': {'allowed': True}}
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_add_remove_protocols_on_modify_old_style():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']), # get protocols
+ ('PATCH', 'private/cli/vserver/add-protocols', SRR['success']),
+ ('PATCH', 'private/cli/vserver/remove-protocols', SRR['success'])
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'allowed_protocols': ['nfs', 'fcp']
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_validate_int_or_string_as_int():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ assert create_module(svm_module, DEFAULT_ARGS, module_args).validate_int_or_string('10', 'whatever') is None
+
+
+def test_validate_int_or_string_as_str():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ assert create_module(svm_module, DEFAULT_ARGS, module_args).validate_int_or_string('whatever', 'whatever') is None
+
+
+def test_negative_validate_int_or_string():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ])
+ module_args = {
+ 'admin_state': 'running',
+ 'services': {'nfs': {'allowed': True, 'enabled': True}}
+ }
+ astring = 'testme'
+ error = expect_and_capture_ansible_exception(create_module(svm_module, DEFAULT_ARGS, module_args).validate_int_or_string, 'fail', '10a', astring)['msg']
+ assert "expecting int value or '%s'" % astring in error
+
+
+def test_rest_successfully_modify_with_admin_state():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['success']) # change admin_state
+ ])
+ module_args = {'admin_state': 'stopped'}
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_modify_with_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['success']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['success']) # change admin_state
+ ])
+ module_args = {'admin_state': 'stopped'}
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+# Tests for web services - 4 cases
+# ZAPI: not supported
+# REST < 9.8: not supported
+# REST 9.8, 9.9, 9.10.0: only certificate is supported, using deprecated certificate fields in svm/svms
+# REST >= 9.10.1: all options are supported, using svm/svms/uuid/web
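+# Rough summary of the gating exercised below (taken from the expected messages and endpoints, not module code):
+#   ZAPI or REST < 9.8   -> fails with 'using web requires ONTAP 9.8 or later ...'
+#   REST 9.8 - 9.10.0    -> only 'certificate' can be set, via PATCH svm/svms/<uuid>;
+#                           client_enabled/ocsp_enabled fail with 'requires ONTAP 9.10.1 or later'
+#   REST >= 9.10.1       -> all web options, via GET/PATCH svm/svms/<uuid>/web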
+
+
+def test_web_services_error_zapi():
+ register_responses([
+ ('GET', 'cluster', SRR['is_zapi']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = 'using web requires ONTAP 9.8 or later and REST must be enabled - Unreachable - using ZAPI.'
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_error_9_7_5():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_7_5']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = 'using web requires ONTAP 9.8 or later and REST must be enabled - ONTAP version: 9.7.5 - using REST.'
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_error_9_8_0():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ msg = "using ('client_enabled', 'ocsp_enabled') requires ONTAP 9.10.1 or later and REST must be enabled - ONTAP version: 9.8.0 - using REST."
+ module_args = {'web': {'certificate': 'cert_name', 'client_enabled': True}}
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+ module_args = {'web': {'certificate': 'cert_name', 'ocsp_enabled': True}}
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_8_0_none_set():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']), # get protocols
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['success']) # change certificate
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_web_services_modify_certificate_9_8_0_other_set():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'svm/svms', SRR['svm_record_cert2']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']), # get protocols
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['success']) # change certificate
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_web_services_modify_certificate_9_8_0_idempotent():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'svm/svms', SRR['svm_record_cert1']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']), # get protocols
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ assert not create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_web_services_modify_certificate_9_8_0_error_not_found():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = "Error certificate not found: {'name': 'cert_name'}. Current certificates with type=server: ['cert_1']"
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_8_0_error_api1():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/certificates', SRR['generic_error']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = "Error retrieving certificate {'name': 'cert_name'}: calling: security/certificates: got Expected error."
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_8_0_error_api2():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['generic_error']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = "Error retrieving certificates: calling: security/certificates: got Expected error."
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_10_1_none_set():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web', SRR['zero_records']),
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web', SRR['success']) # change certificate
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_web_services_modify_certificate_9_10_1_other_set():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web', SRR['svm_web_record_2']),
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web', SRR['success']) # change certificate
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_web_services_modify_certificate_9_10_1_idempotent():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web', SRR['svm_web_record_1']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ assert not create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_web_services_modify_certificate_9_10_1_error_not_found():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = "Error certificate not found: {'name': 'cert_name'}. Current certificates with type=server: ['cert_1']"
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+ msg = "Error certificate not found: {'name': 'cert_name'}. Current certificates with type=server: []"
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_10_1_error_api1():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['generic_error']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = "Error retrieving certificate {'name': 'cert_name'}: calling: security/certificates: got Expected error."
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_10_1_error_api2():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['zero_records']),
+ ('GET', 'security/certificates', SRR['generic_error']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = "Error retrieving certificates: calling: security/certificates: got Expected error."
+ assert create_module(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_10_1_error_api3():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web', SRR['generic_error']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = 'Error retrieving web info: calling: svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web: got Expected error.'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_10_1_error_api4():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ('GET', 'svm/svms', SRR['svm_record']),
+ ('GET', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web', SRR['svm_web_record_2']),
+ ('PATCH', 'svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web', SRR['generic_error']) # change certificate
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = "Error in modify web service for {'certificate': {'uuid': 'cert_uuid_1'}}: "\
+ "calling: svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web: got Expected error."
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_web_services_modify_certificate_9_10_1_warning():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/certificates', SRR['certificate_record_1']),
+ ])
+ module_args = {'web': {'certificate': 'cert_name'}}
+ msg = "Error in modify web service for {'certificate': {'uuid': 'cert_uuid_1'}}: "\
+ "calling: svm/svms/09e9fd5e-8ebd-11e9-b162-005056b39fe7/web: got Expected error."
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ assert my_obj.modify_web_services({}, {'uuid': 'uuid'}) is None
+ assert_warning_was_raised('Nothing to change: {}')
+ clear_warnings()
+ assert my_obj.modify_web_services({'certificate': {'name': 'whatever'}}, {'uuid': 'uuid'}) is None
+ assert_warning_was_raised("Nothing to change: {'certificate': {}}")
+ clear_warnings()
+ assert my_obj.modify_web_services({'certificate': {}}, {'uuid': 'uuid'}) is None
+ assert_warning_was_raised("Nothing to change: {'certificate': {}}")
+
+
+def test_rest_cli_max_volumes_get():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['svm_record_ap']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']),
+ ])
+ module_args = {
+ 'max_volumes': 3333,
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ record = my_obj.get_vserver()
+ assert 'name' in SRR['svm_record_ap'][1]['records'][0]
+ assert 'max_volumes' not in SRR['svm_record_ap'][1]['records'][0]
+ assert 'max_volumes' in record
+
+
+def test_rest_cli_max_volumes_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['success']),
+ ('PATCH', 'private/cli/vserver', SRR['success']),
+ ])
+ module_args = {
+ 'max_volumes': 3333,
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_cli_max_volumes_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['svm_record_ap']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']),
+ ('PATCH', 'private/cli/vserver', SRR['success']),
+ ])
+ module_args = {
+ 'max_volumes': 3333,
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_rest_cli_max_volumes_get():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['svm_record_ap']),
+ ('GET', 'private/cli/vserver', SRR['generic_error']),
+ ])
+ module_args = {
+ 'max_volumes': 3333,
+ }
+ msg = 'Error getting vserver info: calling: private/cli/vserver: got Expected error. - None'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_error_rest_cli_max_volumes_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['svm_record_ap']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']),
+ ('PATCH', 'private/cli/vserver', SRR['generic_error']),
+ ])
+ module_args = {
+ 'max_volumes': 3333,
+ }
+ msg = 'Error updating max_volumes: calling: private/cli/vserver: got Expected error. - None'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_cli_add_remove_protocols_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['success']),
+ ('PATCH', 'private/cli/vserver/add-protocols', SRR['success']),
+ ('PATCH', 'private/cli/vserver/remove-protocols', SRR['success']),
+ ])
+ module_args = {
+ 'allowed_protocols': 'nfs,cifs',
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_rest_cli_add_protocols_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['zero_records']),
+ ('POST', 'svm/svms', SRR['success']),
+ ('PATCH', 'private/cli/vserver/add-protocols', SRR['generic_error']),
+ ])
+ module_args = {
+ 'allowed_protocols': 'nfs,cifs',
+ }
+ msg = 'Error adding protocols: calling: private/cli/vserver/add-protocols: got Expected error. - None'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_cli_remove_protocols_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['svm_record_ap']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']),
+ ('PATCH', 'private/cli/vserver/remove-protocols', SRR['success']),
+ ])
+ module_args = {
+ 'allowed_protocols': 'nfs,cifs',
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_rest_cli_remove_protocols_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['svm_record_ap']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']),
+ ('PATCH', 'private/cli/vserver/remove-protocols', SRR['generic_error']),
+ ])
+ module_args = {
+ 'allowed_protocols': 'nfs,cifs',
+ }
+ msg = 'Error removing protocols: calling: private/cli/vserver/remove-protocols: got Expected error. - None'
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_add_parameter_to_dict():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'name': 'svm',
+ 'ipspace': 'ipspace',
+ 'max_volumes': 3333,
+ }
+ my_obj = create_module(svm_module, DEFAULT_ARGS, module_args)
+ test_dict = {}
+ my_obj.add_parameter_to_dict(test_dict, 'name', None)
+ my_obj.add_parameter_to_dict(test_dict, 'ipspace', 'ipspace_key')
+ my_obj.add_parameter_to_dict(test_dict, 'max_volumes', None, True)
+ print(test_dict)
+ assert test_dict['name'] == 'svm'
+ assert test_dict['ipspace_key'] == 'ipspace'
+ assert test_dict['max_volumes'] == '3333'
+
+
+def test_rest_language_match():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'svm/svms', SRR['svm_record_ap']),
+ ('GET', 'private/cli/vserver', SRR['cli_record']),
+ ('PATCH', 'svm/svms/svm_uuid', SRR['success']),
+ ])
+ module_args = {
+ 'language': 'de.UTF-8'
+ }
+ assert create_and_apply(svm_module, DEFAULT_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised(
+ 'Attempting to change language from ONTAP value de.utf_8 to de.UTF-8. Use de.utf_8 to suppress this warning and maintain idempotency.')
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_template.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_template.py
new file mode 100644
index 000000000..b548739a8
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_template.py
@@ -0,0 +1,86 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_cg_snapshot \
+ import NetAppONTAPCGSnapshot as my_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, parm1=None):
+ ''' save arguments '''
+ self.type = kind
+ self.parm1 = parm1
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'vserver':
+ xml = self.build_vserver_info(self.parm1)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_vserver_info(vserver):
+ ''' build xml data for vserver-info '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = netapp_utils.zapi.NaElement('attributes-list')
+ attributes.add_node_with_children('vserver-info',
+ **{'vserver-name': vserver})
+ xml.add_child_elem(attributes)
+ # print(xml.to_string())
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.server = MockONTAPConnection()
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_ensure_command_called(self):
+ ''' a more interesting test '''
+# TODO: change argument names/values
+ set_module_args({
+ 'vserver': 'vserver',
+ 'volumes': 'volumes',
+ 'snapshot': 'snapshot',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ })
+ my_obj = my_module()
+ my_obj.server = self.server
+ with pytest.raises(AnsibleFailJson) as exc:
+ # It may not be a good idea to start with apply
+ # More atomic methods can be easier to mock
+ # Hint: start with get methods, as they are called first
+ my_obj.apply()
+# TODO: change message, and maybe test contents
+ msg = 'Error fetching CG ID for CG commit snapshot'
+ assert exc.value.args[0]['msg'] == msg
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ucadapter.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ucadapter.py
new file mode 100644
index 000000000..5f1f502c1
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_ucadapter.py
@@ -0,0 +1,173 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_ucadapter '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_ucadapter \
+ import NetAppOntapadapter as ucadapter_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
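+# default module arguments: ZAPI by default (use_rest: never); REST tests override use_rest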
+DEFAULT_ARGS = {
+ 'hostname': '10.0.0.0',
+ 'username': 'user',
+ 'password': 'pass',
+ 'node_name': 'node1',
+ 'adapter_name': '0f',
+ 'mode': 'fc',
+ 'type': 'target',
+ 'use_rest': 'never'
+}
+
+ucm_info_mode_fc = {
+ 'attributes': {
+ 'uc-adapter-info': {
+ 'mode': 'fc',
+ 'pending-mode': 'abc',
+ 'type': 'target',
+ 'pending-type': 'initiator',
+ 'status': 'up',
+ }
+ }
+}
+
+ucm_info_mode_cna = {
+ 'attributes': {
+ 'uc-adapter-info': {
+ 'mode': 'cna',
+ 'pending-mode': 'cna',
+ 'type': 'target',
+ 'pending-type': 'initiator',
+ 'status': 'up',
+ }
+ }
+}
+
+
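+# ZAPI canned responses when mocking ZAPI requests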
+ZRR = zapi_responses({
+ 'ucm_info': build_zapi_response(ucm_info_mode_fc),
+ 'ucm_info_cna': build_zapi_response(ucm_info_mode_cna)
+})
+
+
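+# REST API canned responses when mocking send_request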
+SRR = rest_responses({
+ 'ucm_info': (200, {"records": [{
+ 'current_mode': 'fc',
+ 'current_type': 'target',
+ 'status_admin': 'up'
+ }], "num_records": 1}, None),
+ 'ucm_info_cna': (200, {"records": [{
+ 'current_mode': 'cna',
+ 'current_type': 'target',
+ 'status_admin': 'up'
+ }], "num_records": 1}, None),
+ 'fc_adapter_info': (200, {"records": [{
+ 'uuid': 'abcdef'
+ }], "num_records": 1}, None)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "node_name", "adapter_name"]
+ error = create_module(ucadapter_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_ensure_ucadapter_get_called():
+ ''' fetching ucadapter details '''
+ register_responses([
+ ('ucm-adapter-get', ZRR['empty'])
+ ])
+ ucm_obj = create_module(ucadapter_module, DEFAULT_ARGS)
+ assert ucm_obj.get_adapter() is None
+
+
+def test_change_mode_from_cna_to_fc():
+ ''' configuring ucadapter and checking idempotency '''
+ register_responses([
+ ('ucm-adapter-get', ZRR['ucm_info_cna']),
+ ('fcp-adapter-config-down', ZRR['success']),
+ ('ucm-adapter-modify', ZRR['success']),
+ ('fcp-adapter-config-up', ZRR['success']),
+ ('ucm-adapter-get', ZRR['ucm_info_cna'])
+ ])
+ assert create_and_apply(ucadapter_module, DEFAULT_ARGS)['changed']
+ args = {'mode': 'cna'}
+ assert not create_and_apply(ucadapter_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_change_mode_from_fc_to_cna():
+ register_responses([
+ ('ucm-adapter-get', ZRR['ucm_info']),
+ ('fcp-adapter-config-down', ZRR['success']),
+ ('ucm-adapter-modify', ZRR['success']),
+ ('fcp-adapter-config-up', ZRR['success']),
+ ])
+ args = {'mode': 'cna'}
+ assert create_and_apply(ucadapter_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ucm-adapter-get', ZRR['error']),
+ ('ucm-adapter-modify', ZRR['error']),
+ ('fcp-adapter-config-down', ZRR['error']),
+ ('fcp-adapter-config-up', ZRR['error']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'network/fc/ports', SRR['generic_error']),
+ ('GET', 'private/cli/ucadmin', SRR['generic_error']),
+ ('PATCH', 'private/cli/ucadmin', SRR['generic_error']),
+ ('PATCH', 'network/fc/ports/abcdef', SRR['generic_error']),
+ ('PATCH', 'network/fc/ports/abcdef', SRR['generic_error']),
+ ('GET', 'network/fc/ports', SRR['empty_records'])
+ ])
+ ucm_obj = create_module(ucadapter_module, DEFAULT_ARGS)
+ assert 'Error fetching ucadapter' in expect_and_capture_ansible_exception(ucm_obj.get_adapter, 'fail')['msg']
+ assert 'Error modifying adapter' in expect_and_capture_ansible_exception(ucm_obj.modify_adapter, 'fail')['msg']
+ assert 'Error trying to down' in expect_and_capture_ansible_exception(ucm_obj.online_or_offline_adapter, 'fail', 'down', '0f')['msg']
+ assert 'Error trying to up' in expect_and_capture_ansible_exception(ucm_obj.online_or_offline_adapter, 'fail', 'up', '0f')['msg']
+
+ ucm_obj = create_module(ucadapter_module, DEFAULT_ARGS, {'use_rest': 'always'})
+ ucm_obj.adapters_uuids = {'0f': 'abcdef'}
+ assert 'Error fetching adapter 0f uuid' in expect_and_capture_ansible_exception(ucm_obj.get_adapter_uuid, 'fail', '0f')['msg']
+ assert 'Error fetching ucadapter' in expect_and_capture_ansible_exception(ucm_obj.get_adapter, 'fail')['msg']
+ assert 'Error modifying adapter' in expect_and_capture_ansible_exception(ucm_obj.modify_adapter, 'fail')['msg']
+ assert 'Error trying to down' in expect_and_capture_ansible_exception(ucm_obj.online_or_offline_adapter, 'fail', 'down', '0f')['msg']
+ assert 'Error trying to up' in expect_and_capture_ansible_exception(ucm_obj.online_or_offline_adapter, 'fail', 'up', '0f')['msg']
+ assert 'Error: Adapter(s) 0f not exist' in expect_and_capture_ansible_exception(ucm_obj.get_adapters_uuids, 'fail')['msg']
+
+
+def test_change_mode_from_cna_to_fc_rest():
+ ''' configuring ucadapter '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'private/cli/ucadmin', SRR['ucm_info_cna']),
+ ('GET', 'network/fc/ports', SRR['fc_adapter_info']),
+ ('PATCH', 'network/fc/ports/abcdef', SRR['success']),
+ ('PATCH', 'private/cli/ucadmin', SRR['success']),
+ ('PATCH', 'network/fc/ports/abcdef', SRR['success']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'private/cli/ucadmin', SRR['ucm_info_cna'])
+ ])
+ assert create_and_apply(ucadapter_module, DEFAULT_ARGS, {'use_rest': 'always'})['changed']
+ args = {'mode': 'cna', 'use_rest': 'always'}
+ assert not create_and_apply(ucadapter_module, DEFAULT_ARGS, args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_unix_group.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_unix_group.py
new file mode 100644
index 000000000..a29779e5c
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_unix_group.py
@@ -0,0 +1,545 @@
+# (c) 2019-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_unix_group '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_unix_group \
+ import NetAppOntapUnixGroup as group_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'user_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "vserver"
+ },
+ "name": "user_group",
+ "id": 1,
+ "users": [{"name": "user1"}, {"name": "user2"}],
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
+
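+# ZAPI response body for an existing UNIX group (name-mapping-unix-group-get-iter)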
+unix_group_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'unix-group-info': {
+ 'group-name': 'user_group',
+ 'group-id': '1',
+ 'users': [{'unix-user-name': {'user-name': 'user1'}}]
+ }
+ }
+}
+
+
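+# ZAPI canned responses when mocking ZAPI requests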
+ZRR = zapi_responses({
+ 'unix_group_info': build_zapi_response(unix_group_info)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'name': 'user_group',
+ 'id': '1',
+ 'use_rest': 'never',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ group_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_get_nonexistent_user_group():
+ ''' Test if get_unix_group returns None for non-existent group '''
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['empty'])
+ ])
+ user_obj = create_module(group_module, DEFAULT_ARGS)
+ result = user_obj.get_unix_group()
+ assert result is None
+
+
+def test_get_user_group():
+ ''' Test if get_unix_group returns unix group '''
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info'])
+ ])
+ user_obj = create_module(group_module, DEFAULT_ARGS)
+ result = user_obj.get_unix_group()
+ assert result
+
+
+def test_get_error_existent_user_group():
+ ''' Test error reported when the UNIX group ZAPI lookup fails '''
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['error'])
+ ])
+ group_module_object = create_module(group_module, DEFAULT_ARGS)
+ msg = "Error getting UNIX group"
+ assert msg in expect_and_capture_ansible_exception(group_module_object.get_unix_group, 'fail')['msg']
+
+
+def test_create_unix_group_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['empty']),
+ ('name-mapping-unix-group-create', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': '1'
+ }
+ assert create_and_apply(group_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_unix_group_with_user_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['empty']),
+ ('name-mapping-unix-group-create', ZRR['success']),
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-add-user', ZRR['success'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': '1',
+ 'users': ['user1', 'user2']
+ }
+ assert create_and_apply(group_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_create_unix_group_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['empty']),
+ ('name-mapping-unix-group-create', ZRR['error']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': '1',
+ 'users': ['user1', 'user2']
+ }
+ error = create_and_apply(group_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error creating UNIX group"
+ assert msg in error
+
+
+def test_delete_unix_group_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-destroy', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'state': 'absent'
+ }
+ assert create_and_apply(group_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_remove_unix_group_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-destroy', ZRR['error']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'state': 'absent'
+ }
+ error = create_and_apply(group_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error removing UNIX group"
+ assert msg in error
+
+
+def test_create_idempotent():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info'])
+ ])
+ module_args = {
+ 'state': 'present',
+ 'name': 'user_group',
+ 'id': '1',
+ }
+ assert not create_and_apply(group_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_idempotent():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['empty'])
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'name': 'user_group',
+ }
+ assert not create_and_apply(group_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_unix_group_id_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': '2'
+ }
+ assert create_and_apply(group_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_unix_group_id_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-modify', ZRR['error']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': '2'
+ }
+ error = create_and_apply(group_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error modifying UNIX group"
+ assert msg in error
+
+
+def test_add_unix_group_user_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-add-user', ZRR['success'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'users': ['user1', 'user2']
+ }
+ assert create_and_apply(group_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_add_unix_group_user_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-add-user', ZRR['error'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'users': ['user1', 'user2']
+ }
+ error = create_and_apply(group_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error adding user"
+ assert msg in error
+
+
+def test_delete_unix_group_user_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-delete-user', ZRR['success'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'users': ''
+ }
+ assert create_and_apply(group_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_delete_unix_group_user_zapi():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-get-iter', ZRR['unix_group_info']),
+ ('name-mapping-unix-group-delete-user', ZRR['error'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'users': ''
+ }
+ error = create_and_apply(group_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error deleting user"
+ assert msg in error
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('name-mapping-unix-group-get-iter', ZRR['error']),
+ ('name-mapping-unix-group-create', ZRR['error']),
+ ('name-mapping-unix-group-destroy', ZRR['error']),
+ ('name-mapping-unix-group-modify', ZRR['error']),
+ ])
+ module_args = {'use_rest': 'never', 'name': 'user_group'}
+ my_obj = create_module(group_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.get_unix_group, 'fail')['msg']
+ assert 'Error getting UNIX group user_group: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.create_unix_group, 'fail')['msg']
+ assert 'Error creating UNIX group user_group: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_unix_group, 'fail')['msg']
+ assert 'Error removing UNIX group user_group: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.modify_unix_group, 'fail', 'name-mapping-unix-group-modify')['msg']
+ assert 'Error modifying UNIX group user_group: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+ 'name': 'user_group',
+ 'id': '1'
+}
+
+
+def test_get_nonexistent_user_group_rest():
+ ''' Test if get_unix_group_rest returns None for non-existent group '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['empty_records']),
+ ])
+ user_obj = create_module(group_module, ARGS_REST)
+ result = user_obj.get_unix_group_rest()
+ assert result is None
+
+
+def test_get_existent_user_group_rest():
+ ''' Test if get_unix_group_rest returns an existing group '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ])
+ user_obj = create_module(group_module, ARGS_REST)
+ result = user_obj.get_unix_group_rest()
+ assert result
+
+
+def test_get_error_existent_user_group_rest():
+ ''' Test error reported when the UNIX group REST lookup fails '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['generic_error']),
+ ])
+ error = create_and_apply(group_module, ARGS_REST, fail=True)['msg']
+ msg = "Error getting UNIX group:"
+ assert msg in error
+
+
+def test_ontap_version_rest():
+ ''' Test ONTAP version '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ])
+ module_args = {'use_rest': 'always'}
+ error = create_module(group_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error: REST requires ONTAP 9.9.1 or later for UNIX group APIs."
+ assert msg in error
+
+
+def test_create_unix_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['empty_records']),
+ ('POST', 'name-services/unix-groups', SRR['empty_good']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': 1,
+ }
+ assert create_and_apply(group_module, ARGS_REST, module_args)['changed']
+
+
+def test_create_unix_group_with_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['empty_records']),
+ ('POST', 'name-services/unix-groups', SRR['empty_good']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('POST', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group/users', SRR['empty_records'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': 1,
+ 'users': ['user1', 'user2', 'user3']
+ }
+ assert create_and_apply(group_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_create_unix_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['empty_records']),
+ ('POST', 'name-services/unix-groups', SRR['generic_error']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': 1,
+ }
+ error = create_and_apply(group_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error creating UNIX group:"
+ assert msg in error
+
+
+def test_delete_unix_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('DELETE', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group', SRR['empty_good']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'state': 'absent'
+ }
+ assert create_and_apply(group_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_remove_unix_group_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('DELETE', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group', SRR['generic_error']),
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'state': 'absent'
+ }
+ error = create_and_apply(group_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error deleting UNIX group:"
+ assert msg in error
+
+
+def test_modify_unix_group_id_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('PATCH', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group', SRR['empty_good'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': '2'
+ }
+ assert create_and_apply(group_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_modify_unix_group_id_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('PATCH', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group', SRR['generic_error'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'id': '2'
+ }
+ error = create_and_apply(group_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on modifying UNIX group:"
+ assert msg in error
+
+
+def test_create_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ])
+ module_args = {
+ 'state': 'present',
+ 'name': 'user_group',
+ 'id': '1',
+ }
+ assert not create_and_apply(group_module, ARGS_REST, module_args)['changed']
+
+
+def test_delete_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['empty_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'name': 'user_group'
+ }
+ assert not create_and_apply(group_module, ARGS_REST, module_args)['changed']
+
+
+def test_add_unix_group_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('POST', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group/users', SRR['empty_records'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'users': ['user1', 'user2', 'user3']
+ }
+ assert create_and_apply(group_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_add_unix_group_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('POST', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group/users', SRR['generic_error'])
+ ])
+ module_args = {
+ 'name': 'user_group',
+ 'users': ['user1', 'user2', 'user3']
+ }
+ error = create_and_apply(group_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error Adding user to UNIX group:"
+ assert msg in error
+
+
+def test_delete_unix_group_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('DELETE', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group/users/user2', SRR['empty_records'])
+ ])
+ module_args = {
+ 'users': ["user1"]
+ }
+ assert create_and_apply(group_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_delete_unix_group_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_1']),
+ ('GET', 'name-services/unix-groups', SRR['user_record']),
+ ('DELETE', 'name-services/unix-groups/671aa46e-11ad-11ec-a267-005056b30cfa/user_group/users/user2', SRR['generic_error'])
+ ])
+ module_args = {
+ 'users': ["user1"]
+ }
+ error = create_and_apply(group_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error removing user from UNIX group:"
+ assert msg in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_unix_user.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_unix_user.py
new file mode 100644
index 000000000..1f6fc0847
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_unix_user.py
@@ -0,0 +1,465 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_unix_user '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_unix_user \
+ import NetAppOntapUnixUser as user_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # module specific responses
+ 'user_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "vserver"
+ },
+ "name": "user",
+ "primary_gid": 2,
+ "id": 1,
+ "full_name": "test_user",
+ "target": {
+ "name": "20:05:00:50:56:b3:0c:fa"
+ }
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
+
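+# ZAPI response body for an existing UNIX user (name-mapping-unix-user-get-iter)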
+unix_user_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'unix-user-info': {
+ 'name': 'user',
+ 'user-id': '1',
+ 'group-id': 2,
+ 'full-name': 'test_user'}
+ }
+}
+
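+# ZAPI canned responses when mocking ZAPI requests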
+ZRR = zapi_responses({
+ 'unix_user_info': build_zapi_response(unix_user_info)
+})
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'name': 'user',
+ 'group_id': 2,
+ 'id': '1',
+ 'full_name': 'test_user',
+ 'use_rest': 'never',
+}
+
+
+DEFAULT_NO_USER = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'name': 'no_user',
+ 'group_id': '2',
+ 'id': '1',
+ 'full_name': 'test_user',
+ 'use_rest': 'never',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ user_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_get_nonexistent_user():
+ ''' Test if get_unix_user returns None for non-existent user '''
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['empty'])
+ ])
+ user_obj = create_module(user_module, DEFAULT_NO_USER)
+ result = user_obj.get_unix_user()
+ assert result is None
+
+
+def test_get_existent_user():
+ ''' Test if get_unix_user returns existent user '''
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['unix_user_info'])
+ ])
+ user_obj = create_module(user_module, DEFAULT_ARGS)
+ result = user_obj.get_unix_user()
+ assert result
+
+
+def test_get_error_existent_user():
+ ''' Test error reported when the UNIX user ZAPI lookup fails '''
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['error'])
+ ])
+ user_module_object = create_module(user_module, DEFAULT_ARGS)
+ msg = "Error getting UNIX user"
+ assert msg in expect_and_capture_ansible_exception(user_module_object.get_unix_user, 'fail')['msg']
+
+
+def test_create_unix_user_zapi():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['empty']),
+ ('name-mapping-unix-user-create', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'user',
+ 'group_id': '2',
+ 'id': '1',
+ 'full_name': 'test_user',
+ }
+ assert create_and_apply(user_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_create_unix_user_zapi():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['empty']),
+ ('name-mapping-unix-user-create', ZRR['error']),
+ ])
+ module_args = {
+ 'name': 'user4',
+ 'group_id': '4',
+ 'id': '4',
+ 'full_name': 'test_user4',
+ }
+ error = create_and_apply(user_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error creating UNIX user"
+ assert msg in error
+
+
+def test_delete_unix_user_zapi():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['unix_user_info']),
+ ('name-mapping-unix-user-destroy', ZRR['success']),
+ ])
+ module_args = {
+ 'name': 'user',
+ 'group_id': '2',
+ 'id': '1',
+ 'full_name': 'test_user',
+ 'state': 'absent'
+ }
+ assert create_and_apply(user_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_remove_unix_user_zapi():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['unix_user_info']),
+ ('name-mapping-unix-user-destroy', ZRR['error']),
+ ])
+ module_args = {
+ 'name': 'user',
+ 'group_id': '2',
+ 'id': '1',
+ 'full_name': 'test_user',
+ 'state': 'absent'
+ }
+ error = create_and_apply(user_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error removing UNIX user"
+ assert msg in error
+
+
+def test_modify_unix_user_id_zapi():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['unix_user_info']),
+ ('name-mapping-unix-user-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'group_id': '3',
+ 'id': '2'
+ }
+ assert create_and_apply(user_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_unix_user_full_name_zapi():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['unix_user_info']),
+ ('name-mapping-unix-user-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'full_name': 'test_user1'
+ }
+ assert create_and_apply(user_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_unix_user_full_name_zapi():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['unix_user_info']),
+ ('name-mapping-unix-user-modify', ZRR['error']),
+ ])
+ module_args = {
+ 'full_name': 'test_user1'
+ }
+ error = create_and_apply(user_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ msg = "Error modifying UNIX user"
+ assert msg in error
+
+
+def test_create_idempotent():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['unix_user_info'])
+ ])
+ module_args = {
+ 'state': 'present',
+ 'name': 'user',
+ 'group_id': 2,
+ 'id': '1',
+ 'full_name': 'test_user',
+ }
+ assert not create_and_apply(user_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_idempotent():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['empty'])
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not create_and_apply(user_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('name-mapping-unix-user-get-iter', ZRR['error']),
+ ('name-mapping-unix-user-create', ZRR['error']),
+ ('name-mapping-unix-user-destroy', ZRR['error']),
+ ('name-mapping-unix-user-modify', ZRR['error'])
+ ])
+ module_args = {'id': 5}
+ my_obj = create_module(user_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.get_unix_user, 'fail')['msg']
+ assert 'Error getting UNIX user user: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.create_unix_user, 'fail')['msg']
+ assert 'Error creating UNIX user user: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.delete_unix_user, 'fail')['msg']
+ assert 'Error removing UNIX user user: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.modify_unix_user, 'fail', 'name-mapping-unix-user-modify')['msg']
+ assert 'Error modifying UNIX user user: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+ 'name': 'user',
+ 'primary_gid': 2,
+ 'id': 1,
+ 'full_name': 'test_user'
+}
+
+REST_NO_USER = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+ 'name': 'user5',
+ 'primary_gid': 2,
+ 'id': 1,
+ 'full_name': 'test_user'
+}
+
+
+def test_get_nonexistent_user_rest():
+ ''' Test if get_unix_user returns None for non-existent user '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['empty_records']),
+ ])
+ user_obj = create_module(user_module, REST_NO_USER)
+ result = user_obj.get_unix_user_rest()
+ assert result is None
+
+
+def test_get_existent_user_rest():
+ ''' Test if get_unix_user returns existent user '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['user_record']),
+ ])
+ user_obj = create_module(user_module, ARGS_REST)
+ result = user_obj.get_unix_user_rest()
+ assert result
+
+
+def test_get_error_existent_user_rest():
+ ''' Test error reported when the UNIX user REST lookup fails '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['generic_error']),
+ ])
+ error = create_and_apply(user_module, ARGS_REST, fail=True)['msg']
+ msg = "Error on getting unix-user info:"
+ assert msg in error
+
+
+def test_create_unix_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['empty_records']),
+ ('POST', 'name-services/unix-users', SRR['empty_good']),
+ ])
+ module_args = {
+ 'name': 'user',
+ 'primary_gid': 2,
+ 'id': 1,
+ 'full_name': 'test_user',
+ }
+ assert create_and_apply(user_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_create_unix_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['empty_records']),
+ ('POST', 'name-services/unix-users', SRR['generic_error']),
+ ])
+ module_args = {
+ 'name': 'user4',
+ 'primary_gid': 4,
+ 'id': 4,
+ 'full_name': 'test_user4',
+ }
+ error = create_and_apply(user_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on creating unix-user:"
+ assert msg in error
+
+
+def test_delete_unix_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['user_record']),
+ ('DELETE', 'name-services/unix-users/671aa46e-11ad-11ec-a267-005056b30cfa/user', SRR['empty_good']),
+ ])
+ module_args = {
+ 'name': 'user',
+ 'group_id': '2',
+ 'id': '1',
+ 'full_name': 'test_user',
+ 'state': 'absent'
+ }
+ assert create_and_apply(user_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_remove_unix_user_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['user_record']),
+ ('DELETE', 'name-services/unix-users/671aa46e-11ad-11ec-a267-005056b30cfa/user', SRR['generic_error'])
+ ])
+ module_args = {
+ 'name': 'user',
+ 'id': '1',
+ 'state': 'absent'
+ }
+ error = create_and_apply(user_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on deleting unix-user"
+ assert msg in error
+
+
+def test_modify_unix_user_id_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['user_record']),
+ ('PATCH', 'name-services/unix-users/671aa46e-11ad-11ec-a267-005056b30cfa/user', SRR['empty_good'])
+ ])
+ module_args = {
+ 'name': 'user',
+ 'group_id': '3',
+ 'id': '2'
+ }
+ assert create_and_apply(user_module, ARGS_REST, module_args)['changed']
+
+
+def test_modify_unix_user_full_name_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['user_record']),
+ ('PATCH', 'name-services/unix-users/671aa46e-11ad-11ec-a267-005056b30cfa/user', SRR['empty_good'])
+ ])
+ module_args = {
+ 'name': 'user',
+ 'full_name': 'test_user1'
+ }
+ assert create_and_apply(user_module, ARGS_REST, module_args)['changed']
+
+
+def test_error_modify_unix_user_full_name_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['user_record']),
+ ('PATCH', 'name-services/unix-users/671aa46e-11ad-11ec-a267-005056b30cfa/user', SRR['generic_error'])
+ ])
+ module_args = {
+ 'name': 'user',
+ 'full_name': 'test_user1'
+ }
+ error = create_and_apply(user_module, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on modifying unix-user:"
+ assert msg in error
+
+
+def test_create_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['user_record']),
+ ])
+ module_args = {
+ 'state': 'present',
+ 'name': 'user',
+ 'group_id': 2,
+ 'id': '1',
+ 'full_name': 'test_user',
+ }
+ assert not create_and_apply(user_module, ARGS_REST, module_args)['changed']
+
+
+def test_delete_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'name-services/unix-users', SRR['empty_records']),
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not create_and_apply(user_module, ARGS_REST, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user.py
new file mode 100644
index 000000000..4b3294798
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user.py
@@ -0,0 +1,744 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_user '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_error_message, rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_module, expect_and_capture_ansible_exception, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_user import NetAppOntapUser as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'repeated_password': (400, None, {'message': "New password must be different than the old password."}),
+ 'get_uuid': (200, {'owner': {'uuid': 'ansible'}}, None),
+ 'get_user_rest': (200,
+ {'num_records': 1,
+ 'records': [{'owner': {'uuid': 'ansible_vserver'},
+ 'name': 'abcd'}]}, None),
+ 'get_user_rest_multiple': (200,
+ {'num_records': 2,
+ 'records': [{'owner': {'uuid': 'ansible_vserver'},
+ 'name': 'abcd'},
+ {}]}, None),
+ 'get_user_details_rest': (200,
+ {'role': {'name': 'vsadmin'},
+ 'applications': [{'application': 'http'}],
+ 'locked': False}, None),
+ 'get_user_details_rest_no_pwd': (200, # locked is absent if no password was set
+ {'role': {'name': 'vsadmin'},
+ 'applications': [{'application': 'http'}],
+ }, None)
+}, True)
+
+
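+# build a security-login-get-iter response body for the requested applications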
+def login_info(locked, role_name, apps):
+ attributes_list = []
+ for app in apps:
+ if app in ('console', 'service-processor'):
+ attributes_list.append(
+ {'security-login-account-info': {
+ 'is-locked': locked, 'role-name': role_name, 'application': app, 'authentication-method': 'password'}}
+ )
+ if app in ('ssh',):
+ attributes_list.append(
+ {'security-login-account-info': {
+ 'is-locked': locked, 'role-name': role_name, 'application': 'ssh', 'authentication-method': 'publickey',
+ 'second-authentication-method': 'password'}},
+ )
+ if app in ('http',):
+ attributes_list.extend([
+ {'security-login-account-info': {
+ 'is-locked': locked, 'role-name': role_name, 'application': 'http', 'authentication-method': 'password'}},
+ ])
+ return {
+ 'num-records': len(attributes_list),
+ 'attributes-list': attributes_list
+ }
+
+
+ZRR = zapi_responses({
+ 'login_locked_user': build_zapi_response(login_info("true", 'user', ['console', 'ssh'])),
+ 'login_unlocked_user': build_zapi_response(login_info("False", 'user', ['console', 'ssh'])),
+ 'login_unlocked_user_http': build_zapi_response(login_info("False", 'user', ['http'])),
+ 'login_unlocked_user_service_processor': build_zapi_response(login_info("False", 'user', ['service-processor'])),
+ 'user_not_found': build_zapi_error('16034', "This exception should not be seen"),
+ 'internal_error': build_zapi_error('13114', "Forcing an internal error"),
+ 'reused_password': build_zapi_error('13214', "New password must be different than last 6 passwords."),
+}, True)
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'user_name',
+ 'vserver': 'vserver',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ print('Info: %s' % call_main(my_main, {}, module_args, fail=True)['msg'])
+
+
+def test_module_fail_when_vserver_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'user_name',
+ }
+ assert 'Error: vserver is required' in call_main(my_main, {}, module_args, fail=True)['msg']
+
+
+def test_ensure_user_get_called():
+ ''' a more interesting test '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'role_name': 'test',
+ 'applications': 'http',
+ 'authentication_method': 'password',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ # app = dict(application='testapp', authentication_methods=['testam'])
+ user_info = my_obj.get_user()
+ print('Info: test_user_get: %s' % repr(user_info))
+ assert user_info is None
+
+
+def test_ensure_user_get_called_not_found():
+ ''' a more interesting test '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['user_not_found']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'role_name': 'test',
+ 'applications': 'http',
+ 'authentication_method': 'password',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ # app = dict(application='testapp', authentication_methods=['testam'])
+ user_info = my_obj.get_user()
+ print('Info: test_user_get: %s' % repr(user_info))
+ assert user_info is None
+
+
+def test_ensure_user_apply_called():
+ ''' creating user and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['success']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user_http']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'http',
+ 'authentication_method': 'password',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_sp_apply_called():
+ ''' creating user with service_processor application and idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user_service_processor']),
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user_service_processor']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'service-processor',
+ 'authentication_method': 'password',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['applications'] = 'service_processor'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_apply_for_delete_called():
+ ''' deleting user and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_lock_called():
+ ''' changing user_lock to True and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-lock', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "lock_user": False,
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['lock_user'] = 'true'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_unlock_called():
+ ''' changing user_lock to False and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_locked_user']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_locked_user']),
+ ('ZAPI', 'security-login-unlock', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "lock_user": True,
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['lock_user'] = 'false'
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_set_password_called():
+ ''' set password '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-modify-password', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ 'set_password': '123456',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_set_password_internal_error():
+ ''' change_password returns False on internal error '''
+ register_responses([
+ ('ZAPI', 'security-login-modify-password', ZRR['internal_error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ 'set_password': '123456',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert not my_obj.change_password()
+
+
+def test_set_password_reused():
+ ''' change_password returns False when the password is reused '''
+ register_responses([
+ ('ZAPI', 'security-login-modify-password', ZRR['reused_password'])
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ 'set_password': '123456',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert not my_obj.change_password()
+
+
+def test_ensure_user_role_update_called():
+ ''' updating role_name and setting password '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-modify', ZRR['success']),
+ ('ZAPI', 'security-login-modify-password', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'create',
+ 'role_name': 'test123',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ 'set_password': '123456',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_role_update_additional_application_called():
+ ''' replacing existing applications and setting password '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-modify-password', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'create',
+ 'role_name': 'test123',
+ 'applications': 'http',
+ 'authentication_method': 'password',
+ 'set_password': '123456',
+ 'replace_existing_apps_and_methods': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['error']),
+ ('ZAPI', 'security-login-create', ZRR['error']),
+ ('ZAPI', 'security-login-lock', ZRR['error']),
+ ('ZAPI', 'security-login-unlock', ZRR['error']),
+ ('ZAPI', 'security-login-delete', ZRR['error']),
+ ('ZAPI', 'security-login-modify-password', ZRR['error']),
+ ('ZAPI', 'security-login-modify', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'role_name': 'test',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ app = dict(application='console', authentication_methods=['password'])
+ assert zapi_error_message('Error getting user user_name') in expect_and_capture_ansible_exception(my_obj.get_user, 'fail')['msg']
+ assert zapi_error_message('Error creating user user_name') in expect_and_capture_ansible_exception(my_obj.create_user, 'fail', app)['msg']
+ assert zapi_error_message('Error locking user user_name') in expect_and_capture_ansible_exception(my_obj.lock_given_user, 'fail')['msg']
+ assert zapi_error_message('Error unlocking user user_name') in expect_and_capture_ansible_exception(my_obj.unlock_given_user, 'fail')['msg']
+ assert zapi_error_message('Error removing user user_name') in expect_and_capture_ansible_exception(my_obj.delete_user, 'fail', app)['msg']
+ assert zapi_error_message('Error setting password for user user_name') in expect_and_capture_ansible_exception(my_obj.change_password, 'fail')['msg']
+ assert zapi_error_message('Error modifying user user_name') in expect_and_capture_ansible_exception(my_obj.modify_user, 'fail', app, ['password'])['msg']
+ err_msg = 'vserver is required with ZAPI'
+ assert err_msg in create_module(my_module, DEFAULT_ARGS, {'use_rest': 'never', 'svm': None}, fail=True)['msg']
+
+
+def test_create_user_with_usm_auth():
+ ''' snmp with use_rest auto falls back to ZAPI '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ])
+ module_args = {
+ 'use_rest': 'auto',
+ 'applications': 'snmp',
+ 'authentication_method': 'usm',
+ 'name': 'create',
+ 'role_name': 'test123',
+ 'set_password': '123456',
+ 'remote_switch_ipaddress': '12.34.56.78',
+ 'authentication_password': 'auth_pwd',
+ 'authentication_protocol': 'md5',
+ 'privacy_password': 'auth_pwd',
+ 'privacy_protocol': 'des',
+ 'engine_id': 'engine_123',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_error_applications_snmp():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'applications': 'snmp',
+ 'authentication_method': 'usm',
+ 'name': 'create',
+ 'role_name': 'test123',
+ 'set_password': '123456',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == "snmp as application is not supported in REST."
+
+
+def test_ensure_user_get_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'applications': ['http', 'ontapi'],
+ 'authentication_method': 'password',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_user_rest() is not None
+
+
+def test_ensure_create_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['zero_records']),
+ ('POST', 'security/accounts', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'applications': ['http', 'ontapi'],
+ 'authentication_method': 'password',
+ 'set_password': 'xfjjttjwll`1',
+ 'lock_user': True
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_create_cluster_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['zero_records']),
+ ('POST', 'security/accounts', SRR['empty_good']),
+ ])
+ module_args = {
+ "hostname": "hostname",
+ "username": "username",
+ "password": "password",
+ "name": "user_name",
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'applications': ['http', 'ontapi'],
+ 'authentication_method': 'password',
+ 'set_password': 'xfjjttjwll`1',
+ 'lock_user': True
+ }
+ assert call_main(my_main, module_args)['changed']
+
+
+def test_ensure_delete_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('DELETE', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'state': 'absent',
+ 'role_name': 'vsadmin',
+ 'applications': ['http', 'ontapi'],
+ 'authentication_method': 'password',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_modify_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application': 'ssh',
+ 'authentication_method': 'password',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_lock_unlock_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'applications': 'http',
+ 'authentication_method': 'password',
+ 'lock_user': True,
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_change_password_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ])
+ module_args = {
+ 'set_password': 'newvalue',
+ 'use_rest': 'always',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_change_password_user_rest_check_mode():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ])
+ module_args = {
+ 'set_password': 'newvalue',
+ 'use_rest': 'always',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.module.check_mode = True
+ assert expect_and_capture_ansible_exception(my_obj.apply, 'exit')['changed']
+
+
+def test_existing_password():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['repeated_password']), # password
+ ])
+ module_args = {
+ 'set_password': 'newvalue',
+ 'use_rest': 'always',
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_rest_unsupported_property():
+ register_responses([
+ ])
+ module_args = {
+ 'privacy_password': 'value',
+ 'use_rest': 'always',
+ }
+ msg = "REST API currently does not support 'privacy_password'"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_negative_zapi_missing_netapp_lib(mock_has):
+ register_responses([
+ ])
+ mock_has.return_value = False
+ module_args = {
+ 'use_rest': 'never',
+ }
+ msg = "Error: the python NetApp-Lib module is required. Import error: None"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_zapi_missing_apps():
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ }
+ msg = "application_dicts or application_strs is a required parameter with ZAPI"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rest_error_on_get_user():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ msg = "Error while fetching user info: Expected error"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rest_error_on_get_user_multiple():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest_multiple']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ msg = "Error while fetching user info, found multiple entries:"
+ assert msg in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rest_error_on_get_user_details():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ msg = "Error while fetching user details: Expected error"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rest_error_on_delete():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('DELETE', 'security/accounts/ansible_vserver/abcd', SRR['generic_error']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'state': 'absent',
+ 'role_name': 'vsadmin',
+ }
+ msg = "Error while deleting user: Expected error"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rest_error_on_unlocking():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['generic_error']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'lock_user': True
+ }
+ msg = "Error while locking/unlocking user: Expected error"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rest_error_on_unlocking_no_password():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest_no_pwd']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'lock_user': True
+ }
+ msg = "Error: cannot modify lock state if password is not set."
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rest_error_on_changing_password():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['generic_error']),
+ ])
+ module_args = {
+ 'set_password': '12345',
+ 'use_rest': 'always',
+ }
+ msg = "Error while updating user password: Expected error"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_rest_error_on_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['generic_error']),
+ ])
+ module_args = {
+ 'role_name': 'vsadmin2',
+ 'use_rest': 'always',
+ 'applications': ['http', 'ontapi'],
+ 'authentication_method': 'password',
+ }
+ msg = "Error while modifying user details: Expected error"
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_unlocking_with_password():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest_no_pwd']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['success']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['success']),
+ ])
+ module_args = {
+ 'set_password': 'ansnssnajj12%',
+ 'use_rest': 'always',
+ 'lock_user': True
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_create_validations():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['zero_records']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['zero_records']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['zero_records']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ }
+ msg = 'Error: missing required parameters for create: role_name and: application_dicts or application_strs.'
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['role_name'] = 'role'
+ msg = 'Error: missing required parameter for create: application_dicts or application_strs.'
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args.pop('role_name')
+ module_args['applications'] = 'http'
+ module_args['authentication_method'] = 'password'
+ msg = 'Error: missing required parameter for create: role_name.'
+ assert msg == call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_dicts.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_dicts.py
new file mode 100644
index 000000000..a4181d54d
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_dicts.py
@@ -0,0 +1,589 @@
+# (c) 2018 - 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_user '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, print_requests, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_error_message, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ expect_and_capture_ansible_exception, call_main, create_module, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_user import NetAppOntapUser as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
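+# each entry is a (status code, JSON body, error) tuple for the mocked send_request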
+SRR = rest_responses({
+ 'invalid_value_error': (400, None, {'message': "invalid value service_processor"}),
+ 'get_user_rest': (200,
+ {'num_records': 1,
+ 'records': [{'owner': {'uuid': 'ansible_vserver'},
+ 'name': 'abcd'}]}, None),
+ 'get_user_details_rest': (200,
+ {'role': {'name': 'vsadmin'},
+ 'applications': [{'application': 'http', 'authentication-method': 'password', 'second_authentication_method': 'none'}],
+ 'locked': False}, None)
+})
+
+
+def login_info(locked, role_name, apps):
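+ # builds the attributes-list of a security-login-get-iter response for the requested applications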
+ attributes_list = []
+ for app in apps:
+ if app in ('console', 'service-processor',):
+ attributes_list.append(
+ {'security-login-account-info': {
+ 'is-locked': locked, 'role-name': role_name, 'application': app, 'authentication-method': 'password'}}
+ )
+ if app in ('ssh',):
+ attributes_list.append(
+ {'security-login-account-info': {
+ 'is-locked': locked, 'role-name': role_name, 'application': 'ssh', 'authentication-method': 'publickey',
+ 'second-authentication-method': 'password'}},
+ )
+ if app in ('http',):
+ attributes_list.extend([
+ {'security-login-account-info': {
+ 'is-locked': locked, 'role-name': role_name, 'application': 'http', 'authentication-method': 'password'}},
+ {'security-login-account-info': {
+ 'is-locked': locked, 'role-name': role_name, 'application': 'http', 'authentication-method': 'saml'}},
+ ])
+ return {
+ 'num-records': len(attributes_list),
+ 'attributes-list': attributes_list
+ }
+
+
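+# ZAPI canned responses keyed by lock state and application mix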
+ZRR = zapi_responses({
+ 'login_locked_user': build_zapi_response(login_info("true", 'user', ['console', 'ssh'])),
+ 'login_unlocked_user': build_zapi_response(login_info("False", 'user', ['console', 'ssh'])),
+ 'login_unlocked_user_console': build_zapi_response(login_info("False", 'user', ['console'])),
+ 'login_unlocked_user_service_processor': build_zapi_response(login_info("False", 'user', ['service-processor'])),
+ 'login_unlocked_user_ssh': build_zapi_response(login_info("False", 'user', ['ssh'])),
+ 'login_unlocked_user_http': build_zapi_response(login_info("False", 'user', ['http']))
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'user_name',
+ 'vserver': 'vserver',
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ register_responses([
+ ])
+ module_args = {
+ "use_rest": "never"
+ }
+ print('Info: %s' % call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'])
+
+
+def test_module_fail_when_application_name_is_repeated():
+ ''' repeated application names are reported as an error '''
+ register_responses([
+ ])
+ module_args = {
+ "use_rest": "never",
+ "application_dicts": [
+ {'application': 'ssh', 'authentication_methods': ['cert']},
+ {'application': 'ssh', 'authentication_methods': ['password']}]
+ }
+ error = 'Error: repeated application name: ssh. Group all authentication methods under a single entry.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_ensure_user_get_called():
+ ''' get_user returns existing applications and methods when replace_existing_apps_and_methods is always '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user_http']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'role_name': 'test',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ 'replace_existing_apps_and_methods': 'always'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ user_info = my_obj.get_user()
+ print('Info: test_user_get: %s' % repr(user_info))
+ assert 'saml' in user_info['applications'][0]['authentication_methods']
+
+
+def test_ensure_user_apply_called_replace():
+ ''' creating user and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'user',
+ 'applications': 'console',
+ 'authentication_method': 'password',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_apply_called_using_dict():
+ ''' creating user and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user_ssh']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'user',
+ 'application_dicts': [{
+ 'application': 'ssh',
+ 'authentication_methods': ['publickey'],
+ 'second_authentication_method': 'password'
+ }]
+ }
+
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ # BUG: SSH is not idempotent when replace_existing_apps_and_methods == 'auto'
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_apply_called_add():
+ ''' creating user and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-modify', ZRR['success']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user_console']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'user',
+ 'application_dicts':
+ [dict(application='console', authentication_methods=['password'])],
+ 'replace_existing_apps_and_methods': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_sp_apply_called():
+ ''' creating user with service_processor application and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user_service_processor']),
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user_service_processor']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'user',
+ 'application_dicts':
+ [dict(application='service-processor', authentication_methods=['password'])],
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['application_dicts'] = [dict(application='service_processor', authentication_methods=['password'])]
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_apply_for_delete_called():
+ ''' deleting user and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "state": "absent",
+ 'name': 'create',
+ 'role_name': 'user',
+ 'application_dicts':
+ [dict(application='console', authentication_methods=['password'])],
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_lock_called():
+ ''' changing user_lock to True and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-lock', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "lock_user": False,
+ 'name': 'create',
+ 'role_name': 'user',
+ 'application_dicts': [
+ dict(application='console', authentication_methods=['password']),
+ dict(application='ssh', authentication_methods=['publickey'], second_authentication_method='password')
+ ],
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['lock_user'] = True
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_unlock_called():
+ ''' changing user_lock to False and checking idempotency '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_locked_user']),
+ ('ZAPI', 'security-login-get-iter', ZRR['login_locked_user']),
+ ('ZAPI', 'security-login-unlock', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "lock_user": True,
+ 'name': 'create',
+ 'role_name': 'user',
+ 'application_dicts': [
+ dict(application='console', authentication_methods=['password']),
+ dict(application='ssh', authentication_methods=['publickey'], second_authentication_method='password')
+ ],
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+ module_args['lock_user'] = False
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_set_password_called():
+ ''' set password '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-modify-password', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'user',
+ 'application_dicts': [
+ dict(application='console', authentication_methods=['password']),
+ dict(application='ssh', authentication_methods=['publickey'], second_authentication_method='password')
+ ],
+ 'set_password': '123456',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_role_update_called():
+ ''' updating role_name and setting password '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-modify', ZRR['success']),
+ ('ZAPI', 'security-login-modify', ZRR['success']),
+ ('ZAPI', 'security-login-modify-password', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'test123',
+ 'application_dicts': [
+ dict(application='console', authentication_methods=['password']),
+ dict(application='ssh', authentication_methods=['publickey'], second_authentication_method='password')
+ ],
+ 'set_password': '123456',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_role_update_additional_application_called():
+ ''' replacing existing applications and setting password '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-modify-password', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'test123',
+ 'application_dicts':
+ [dict(application='http', authentication_methods=['password'])],
+ 'set_password': '123456',
+ 'replace_existing_apps_and_methods': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_user_role_update_additional_method_called():
+ ''' replacing existing authentication methods and setting password '''
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['login_unlocked_user']),
+ ('ZAPI', 'security-login-create', ZRR['success']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-delete', ZRR['success']),
+ ('ZAPI', 'security-login-modify-password', ZRR['success']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'test123',
+ 'application_dicts':
+ [dict(application='console', authentication_methods=['domain'])],
+ 'set_password': '123456',
+ 'replace_existing_apps_and_methods': 'always'
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ZAPI', 'security-login-get-iter', ZRR['error']),
+ ('ZAPI', 'security-login-create', ZRR['error']),
+ ('ZAPI', 'security-login-lock', ZRR['error']),
+ ('ZAPI', 'security-login-unlock', ZRR['error']),
+ ('ZAPI', 'security-login-delete', ZRR['error']),
+ ('ZAPI', 'security-login-modify-password', ZRR['error']),
+ ('ZAPI', 'security-login-modify', ZRR['error']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ 'name': 'create',
+ 'role_name': 'test123',
+ 'application_dicts':
+ [dict(application='console', authentication_methods=['password'])],
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ app = dict(application='console', authentication_methods=['password'])
+ assert zapi_error_message('Error getting user create') in expect_and_capture_ansible_exception(my_obj.get_user, 'fail')['msg']
+ assert zapi_error_message('Error creating user create') in expect_and_capture_ansible_exception(my_obj.create_user, 'fail', app)['msg']
+ assert zapi_error_message('Error locking user create') in expect_and_capture_ansible_exception(my_obj.lock_given_user, 'fail')['msg']
+ assert zapi_error_message('Error unlocking user create') in expect_and_capture_ansible_exception(my_obj.unlock_given_user, 'fail')['msg']
+ assert zapi_error_message('Error removing user create') in expect_and_capture_ansible_exception(my_obj.delete_user, 'fail', app)['msg']
+ assert zapi_error_message('Error setting password for user create') in expect_and_capture_ansible_exception(my_obj.change_password, 'fail')['msg']
+ assert zapi_error_message('Error modifying user create') in expect_and_capture_ansible_exception(my_obj.modify_user, 'fail', app, ['password'])['msg']
+
+
+def test_rest_error_applications_snmp():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['get_user_rest']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'test123',
+ 'application_dicts':
+ [dict(application='snmp', authentication_methods=['usm'])],
+ 'set_password': '123456',
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == "snmp as application is not supported in REST."
+
+
+def test_ensure_user_get_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application_dicts':
+ [dict(application='http', authentication_methods=['password']),
+ dict(application='ontapi', authentication_methods=['password'])],
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert my_obj.get_user_rest() is not None
+
+
+def test_ensure_create_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['zero_records']),
+ ('POST', 'security/accounts', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application_dicts':
+ [dict(application='http', authentication_methods=['password']),
+ dict(application='ontapi', authentication_methods=['password'])],
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_delete_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('DELETE', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'state': 'absent',
+ 'role_name': 'vsadmin',
+ 'application_dicts':
+ [dict(application='http', authentication_methods=['password']),
+ dict(application='ontapi', authentication_methods=['password'])],
+ 'vserver': None
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_modify_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application_dicts': [dict(application='service_processor', authentication_methods=['usm'])]
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_lock_unlock_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application_dicts':
+ [dict(application='http', authentication_methods=['password'])],
+ 'lock_user': True,
+ }
+ print_requests()
+ # TODO: a single PATCH should be enough?
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_change_password_user_rest_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['get_user_rest']),
+ ('GET', 'security/accounts/ansible_vserver/abcd', SRR['get_user_details_rest']),
+ ('PATCH', 'security/accounts/ansible_vserver/abcd', SRR['empty_good']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application_dicts':
+ [dict(application='http', authentication_methods=['password'])],
+ 'password': 'newvalue',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_sp_retry():
+ """simulate error in create_user_rest and retry"""
+ register_responses([
+ # retry followed by error
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['zero_records']),
+ ('POST', 'security/accounts', SRR['invalid_value_error']),
+ ('POST', 'security/accounts', SRR['generic_error']),
+ # retry followed by success
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'security/accounts', SRR['zero_records']),
+ ('POST', 'security/accounts', SRR['invalid_value_error']),
+ ('POST', 'security/accounts', SRR['success']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application_dicts': [
+ dict(application='service_processor', authentication_methods=['usm'])
+ ]
+ }
+ assert 'invalid value' in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+ module_args['application_dicts'] = [dict(application='service-processor', authentication_methods=['usm'])]
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_validate_application():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application_dicts':
+ [dict(application='http', authentication_methods=['password'])],
+ 'password': 'newvalue',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'second_authentication_method' in my_obj.parameters['applications'][0]
+ my_obj.parameters['applications'][0].pop('second_authentication_method')
+ my_obj.validate_applications()
+ assert 'second_authentication_method' in my_obj.parameters['applications'][0]
+ assert my_obj.parameters['applications'][0]['second_authentication_method'] is None
+
+
+def test_sp_transform():
+ current = {'applications': []}
+ sp_app_u = 'service_processor'
+ sp_app_d = 'service-processor'
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ # 1. no change using underscore
+ module_args = {
+ "use_rest": "always",
+ 'role_name': 'vsadmin',
+ 'application_dicts': [
+ {'application': sp_app_u, 'authentication_methods': ['password']}
+ ],
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.change_sp_application([])
+ sp_apps = [application['application'] for application in my_obj.parameters['applications'] if application['application'].startswith('service')]
+ assert sp_apps == [sp_app_u]
+ # 2. change underscore -> dash
+ my_obj.change_sp_application([{'application': sp_app_d}])
+ sp_apps = [application['application'] for application in my_obj.parameters['applications'] if application['application'].startswith('service')]
+ assert sp_apps == [sp_app_d]
+ # 3. no change using dash
+ module_args['application_dicts'] = [{'application': sp_app_d, 'authentication_methods': ['password']}]
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ my_obj.change_sp_application([])
+ sp_apps = [application['application'] for application in my_obj.parameters['applications'] if application['application'].startswith('service')]
+ assert sp_apps == [sp_app_d]
+ # 4. change dash -> underscore
+ my_obj.change_sp_application([{'application': sp_app_u}])
+ sp_apps = [application['application'] for application in my_obj.parameters['applications'] if application['application'].startswith('service')]
+ assert sp_apps == [sp_app_u]
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_role.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_role.py
new file mode 100644
index 000000000..9fafd8a68
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_role.py
@@ -0,0 +1,139 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_user_role '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_user_role \
+ import NetAppOntapUserRole as role_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def build_role_info(access_level='all'):
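+ # canned attributes-list for a single security-login-role-get-iter record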
+ return {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'security-login-role-info': {
+ 'access-level': access_level,
+ 'command-directory-name': 'volume',
+ 'role-name': 'testrole',
+ 'role-query': 'show',
+ 'vserver': 'ansible'
+ }
+ }
+ }
+
+
+ZRR = zapi_responses({
+ 'build_role_info': build_zapi_response(build_role_info()),
+ 'build_role_modified': build_zapi_response(build_role_info('none'))
+})
+
+DEFAULT_ARGS = {
+ 'name': 'testrole',
+ 'vserver': 'ansible',
+ 'command_directory_name': 'volume',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'https': 'False',
+ 'use_rest': 'never'
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "name"]
+ error = create_module(role_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_get_nonexistent_role():
+ ''' Test if get_role returns None for non-existent role '''
+ register_responses([
+ ('ZAPI', 'security-login-role-get-iter', ZRR['empty']),
+ ])
+ my_obj = create_module(role_module, DEFAULT_ARGS)
+ assert my_obj.get_role() is None
+
+
+def test_get_existing_role():
+ ''' Test if get_role returns details for existing role '''
+ register_responses([
+ ('ZAPI', 'security-login-role-get-iter', ZRR['build_role_info']),
+ ])
+ my_obj = create_module(role_module, DEFAULT_ARGS)
+ current = my_obj.get_role()
+ assert current['name'] == DEFAULT_ARGS['name']
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('ZAPI', 'security-login-role-get-iter', ZRR['empty']),
+ ('ZAPI', 'security-login-role-create', ZRR['success']),
+ # idempotency check
+ ('ZAPI', 'security-login-role-get-iter', ZRR['build_role_info']),
+ ])
+ assert create_and_apply(role_module, DEFAULT_ARGS)['changed']
+ assert not create_and_apply(role_module, DEFAULT_ARGS)['changed']
+
+
+def test_successful_modify():
+ ''' Test successful modify '''
+ register_responses([
+ ('ZAPI', 'security-login-role-get-iter', ZRR['build_role_info']),
+ ('ZAPI', 'security-login-role-modify', ZRR['success']),
+ # idempotency check
+ ('ZAPI', 'security-login-role-get-iter', ZRR['build_role_modified']),
+ ])
+ assert create_and_apply(role_module, DEFAULT_ARGS, {'access_level': 'none'})['changed']
+ assert not create_and_apply(role_module, DEFAULT_ARGS, {'access_level': 'none'})['changed']
+
+
+def test_successful_delete():
+ ''' Test delete existing role '''
+ register_responses([
+ ('ZAPI', 'security-login-role-get-iter', ZRR['build_role_info']),
+ ('ZAPI', 'security-login-role-delete', ZRR['success']),
+ # idempotency check
+ ('ZAPI', 'security-login-role-get-iter', ZRR['empty']),
+ ])
+ assert create_and_apply(role_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+ assert not create_and_apply(role_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('ZAPI', 'security-login-role-get-iter', ZRR['error']),
+ ('ZAPI', 'security-login-role-create', ZRR['error']),
+ ('ZAPI', 'security-login-role-modify', ZRR['error']),
+ ('ZAPI', 'security-login-role-delete', ZRR['error'])
+ ])
+ my_obj = create_module(role_module, DEFAULT_ARGS)
+ assert 'Error getting role' in expect_and_capture_ansible_exception(my_obj.get_role, 'fail')['msg']
+ assert 'Error creating role' in expect_and_capture_ansible_exception(my_obj.create_role, 'fail')['msg']
+ assert 'Error modifying role' in expect_and_capture_ansible_exception(my_obj.modify_role, 'fail', {})['msg']
+ assert 'Error removing role' in expect_and_capture_ansible_exception(my_obj.delete_role, 'fail')['msg']
+
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['command_directory_name']
+ assert 'Error: command_directory_name is required' in create_module(role_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['vserver']
+ assert 'Error: vserver is required' in create_module(role_module, DEFAULT_ARGS_COPY, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_role_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_role_rest.py
new file mode 100644
index 000000000..b6e1e0b95
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_user_role_rest.py
@@ -0,0 +1,647 @@
+# (c) 2022-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_user_role \
+ import NetAppOntapUserRole as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'user_role_9_10': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [
+ {
+ "access": "readonly",
+ "path": "/api/storage/volumes"
+ }
+ ],
+ "name": "admin",
+ "scope": "cluster"
+ }, None),
+ 'user_role_9_11_command': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [
+ {
+ "path": "job schedule interval",
+ 'query': "-days <1 -hours >12"
+ }, {
+ 'path': 'DEFAULT',
+ 'access': 'none',
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }}
+ }
+ ],
+ "name": "admin",
+ "scope": "cluster"
+ }, None),
+ 'user_role_9_10_two_paths': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [
+ {
+ "access": "readonly",
+ "path": "/api/storage/volumes"
+ },
+ {
+ "access": "readonly",
+ "path": "/api/cluster/jobs",
+ }
+ ],
+ "name": "admin",
+ "scope": "cluster"
+ }, None),
+ 'user_role_9_10_two_paths_modified': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [
+ {"access": "readonly", "path": "/api/storage/volumes"},
+ {"access": "readonly", "path": "/api/cluster/jobs"}
+ ],
+ "name": "admin",
+ "scope": "cluster"
+ }, None),
+ 'user_role_9_11': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [
+ {
+ "access": "readonly",
+ "path": "/api/cluster/jobs",
+ }
+ ],
+ "name": "admin",
+ "scope": "cluster"
+ }, None),
+ 'user_role_cluster_jobs_all': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [{"access": "all", "path": "/api/cluster/jobs"}],
+ "name": "admin",
+ "scope": "cluster"
+ }, None),
+ 'user_role_privileges': (200, {
+ "records": [
+ {
+ "access": "readonly",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ "path": "/api/cluster/jobs",
+ }
+ ],
+ }, None),
+ 'user_role_privileges_command': (200, {
+ "records": [
+ {
+ "access": "all",
+ 'query': "-days <1 -hours >12",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ "path": "job schedule interval",
+ }
+ ],
+ }, None),
+ 'user_role_privileges_two_paths': (200, {
+ "records": [
+ {
+ "access": "readonly",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ "path": "/api/cluster/jobs",
+ }, {
+ "access": "readonly",
+ "_links": {
+ "self": {
+ "href": "/api/resourcelink"
+ }
+ },
+ "path": "/api/storage/volumes",
+ }
+ ],
+ }, None),
+ 'user_role_volume': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [
+ {
+ "access": "readonly",
+ "path": "volume create"
+ },
+ {
+ "access": "readonly",
+ "path": "volume modify",
+ },
+ {
+ "access": "readonly",
+ "path": "volume show",
+ }
+ ],
+ "name": "admin",
+ }, None),
+ 'user_role_vserver': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [{"access": "readonly", "path": "vserver show"}],
+ "name": "admin",
+ }, None),
+ 'user_role_volume_privileges': (200, {
+ "records": [
+ {"access": "readonly", "path": "volume create"},
+ {"access": "readonly", "path": "volume modify"}
+ ],
+ }, None),
+ 'user_role_privileges_schedule': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [{"access": "all", "path": "job schedule interval", "query": "-days <1 -hours >12"}],
+ "name": "admin",
+ }, None),
+ 'user_role_privileges_schedule_modify': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [{"access": "all", "path": "job schedule interval", "query": "-days <1 -hours >8"}],
+ "name": "admin",
+ }, None),
+ 'user_role_volume_with_query': (200, {
+ "owner": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "privileges": [{"access": "readonly", "path": "/api/storage/volumes", "query": "-vserver vs1|vs2|vs3 -destination-aggregate aggr1|aggr2"}],
+ "name": "admin",
+ "scope": "cluster"
+ }, None),
+ "error_4": (409, None, {'code': 4, 'message': "entry doesn't exist, 'target': 'path'"}),
+})
+
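+# privilege payloads reused as the module's 'privileges' option in the tests below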
+PRIVILEGES_SINGLE_WITH_QUERY = [{
+ "path": "job schedule interval",
+ 'query': "-days <1 -hours >12"
+}]
+
+PRIVILEGES_PATH_ONLY = [{
+ "path": "/api/cluster/jobs"
+}]
+
+PRIVILEGES_2_PATH_ONLY = [{
+ "path": "/api/cluster/jobs"
+}, {
+ "path": "/api/storage/volumes"
+}]
+
+PRIVILEGES = [{
+ 'path': '/api/storage/volumes',
+ 'access': 'readonly'
+}]
+
+PRIVILEGES_911 = [{
+ 'path': '/api/storage/volumes',
+ 'access': 'readonly',
+}]
+
+PRIVILEGES_MODIFY = [{
+ 'path': '/api/cluster/jobs',
+ 'access': 'all'
+}]
+
+PRIVILEGES_COMMAND_MODIFY = [{
+ 'path': 'job schedule interval',
+ 'query': "-days <1 -hours >8"
+}]
+
+PRIVILEGES_MODIFY_911 = [{
+ 'path': '/api/cluster/jobs',
+ 'access': 'all',
+}]
+
+PRIVILEGES_MODIFY_NEW_PATH = [{
+ 'path': '/api/cluster/jobs',
+ 'access': 'all'
+}, {
+ "path": "/api/storage/volumes",
+ "access": 'all'
+}]
+
+PRIVILEGES_MODIFY_NEW_PATH_9_11 = [{
+ 'path': '/api/cluster/jobs',
+ 'access': 'all',
+}, {
+ "path": "/api/storage/volumes",
+ "access": 'all',
+}]
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'name': 'admin',
+ 'vserver': 'svm1'
+}
+
+
+def test_privileges_query_in_9_10():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {'privileges': PRIVILEGES_SINGLE_WITH_QUERY,
+ 'use_rest': 'always'}
+ my_module_object = create_module(my_module, DEFAULT_ARGS, module_args, fail=True)
+ msg = 'Minimum version of ONTAP for privileges.query is (9, 11, 1)'
+ assert msg in my_module_object['msg']
+
+
+def test_get_user_role_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_role() is None
+
+
+def test_get_user_role_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['generic_error'])
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error getting role admin: calling: security/roles: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_role, 'fail')['msg']
+
+
+def test_get_user_role():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['user_role_9_10'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_role() is not None
+
+
+def test_get_user_role_9_11():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['user_role_9_11'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ assert my_obj.get_role() is not None
+
+
+def test_create_user_role_9_10_new_format():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ('POST', 'security/roles', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_9_10'])
+ ])
+ module_args = {'privileges': PRIVILEGES}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_user_role_9_11_new_format():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ('POST', 'security/roles', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_9_10'])
+ ])
+ module_args = {'privileges': PRIVILEGES_911}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_user_role_9_11_new_format_query():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ('POST', 'security/roles', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_privileges_schedule'])
+ ])
+ module_args = {'privileges': PRIVILEGES_SINGLE_WITH_QUERY}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_user_role_9_10_new_format_path_only():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ('POST', 'security/roles', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_9_11'])
+ ])
+ module_args = {'privileges': PRIVILEGES_PATH_ONLY}
+ print(module_args)
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_user_role_9_10_new_format_2_path_only():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ('POST', 'security/roles', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_9_10_two_paths'])
+ ])
+ module_args = {'privileges': PRIVILEGES_2_PATH_ONLY}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_user_role_9_10_old_format():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ('POST', 'security/roles', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_9_10'])
+ ])
+ module_args = {'command_directory_name': "/api/storage/volumes",
+ 'access_level': 'readonly'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_user_role_9_11_old_format_with_query():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ('POST', 'security/roles', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_volume_with_query'])
+ ])
+ module_args = {'command_directory_name': "/api/storage/volumes",
+ 'access_level': 'readonly',
+ 'query': "-vserver vs1|vs2|vs3 -destination-aggregate aggr1|aggr2"}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_create_user_role_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('POST', 'security/roles', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['privileges'] = PRIVILEGES
+ error = expect_and_capture_ansible_exception(my_obj.create_role, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error creating role admin: calling: security/roles: got Expected error.' == error
+
+
+def test_delete_user_role():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['user_role_9_10']),
+ ('DELETE', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['empty_records'])
+ ])
+ module_args = {'state': 'absent',
+ 'command_directory_name': "/api/storage/volumes",
+ 'access_level': 'readonly'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_user_role_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('DELETE', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['privileges'] = PRIVILEGES
+ my_obj.parameters['state'] = 'absent'
+ my_obj.owner_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ error = expect_and_capture_ansible_exception(my_obj.delete_role, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting role admin: calling: security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin: got Expected error.' == error
+
+
+def test_modify_user_role_9_10():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['user_role_9_10']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_privileges']),
+ ('PATCH', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fcluster%2Fjobs', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_cluster_jobs_all'])
+ ])
+ module_args = {'privileges': PRIVILEGES_MODIFY}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_user_role_command_9_10():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['user_role_9_11_command']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_privileges_command']),
+ ('PATCH', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/job schedule interval', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_privileges_schedule_modify'])
+ ])
+ module_args = {'privileges': PRIVILEGES_COMMAND_MODIFY}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_remove_user_role_9_10():
+ # This test will modify cluster/job, and delete storage/volumes
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['user_role_9_10_two_paths']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_privileges_two_paths']),
+ ('PATCH', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fcluster%2Fjobs', SRR['empty_good']),
+ ('DELETE', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fstorage%2Fvolumes', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_cluster_jobs_all'])
+ ])
+ module_args = {'privileges': PRIVILEGES_MODIFY}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_user_role_9_11():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['user_role_9_11']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_privileges']),
+ ('PATCH', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fcluster%2Fjobs', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_cluster_jobs_all'])
+ ])
+ module_args = {'privileges': PRIVILEGES_MODIFY_911}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_user_role_create_new_privilege():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles', SRR['user_role_9_10']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_privileges']),
+ ('PATCH', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fcluster%2Fjobs', SRR['empty_good']), # First path
+ ('POST', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['empty_good']), # Second path
+ ('GET', 'security/roles', SRR['user_role_9_10_two_paths_modified'])
+ ])
+ module_args = {'privileges': PRIVILEGES_MODIFY_NEW_PATH}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_user_role_create_new_privilege_9_11():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['user_role_9_11']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_privileges']),
+ ('PATCH', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fcluster%2Fjobs', SRR['empty_good']), # First path
+ ('POST', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['empty_good']), # Second path
+ ('GET', 'security/roles', SRR['empty_records'])
+ ])
+ module_args = {'privileges': PRIVILEGES_MODIFY_NEW_PATH_9_11}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_remove_user_role_error():
+ # This test exercises the error path when deleting the storage/volumes privilege
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('DELETE', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fstorage%2Fvolumes', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['name'] = 'admin'
+ my_obj.owner_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ error = expect_and_capture_ansible_exception(my_obj.delete_role_privilege, 'fail', '/api/storage/volumes')['msg']
+ print('Info: %s' % error)
+ assert 'Error deleting role privileges admin: calling: security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fstorage%2Fvolumes: '\
+ 'got Expected error.' == error
+
+
+def test_get_user_role_privileges_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['name'] = 'admin'
+ my_obj.owner_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ error = expect_and_capture_ansible_exception(my_obj.get_role_privileges_rest, 'fail')['msg']
+ print('Info: %s' % error)
+ assert 'Error getting role privileges for role admin: calling: security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges: '\
+ 'got Expected error.' == error
+
+
+def test_create_user_role_privileges_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('POST', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['name'] = 'admin'
+ my_obj.owner_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ error = expect_and_capture_ansible_exception(my_obj.create_role_privilege, 'fail', PRIVILEGES[0])['msg']
+ print('Info: %s' % error)
+ assert 'Error creating role privilege /api/storage/volumes: calling: security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges: '\
+ 'got Expected error.' == error
+
+
+def test_modify_user_role_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_privileges']),
+ ('PATCH', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fcluster%2Fjobs', SRR['generic_error'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.parameters['privileges'] = PRIVILEGES_MODIFY
+ my_obj.owner_uuid = '02c9e252-41be-11e9-81d5-00a0986138f7'
+ current = {'privileges': PRIVILEGES_MODIFY}
+ error = expect_and_capture_ansible_exception(my_obj.modify_role, 'fail', current)['msg']
+ print('Info: %s' % error)
+ assert 'Error modifying privileges for path %2Fapi%2Fcluster%2Fjobs: calling: '\
+ 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/%2Fapi%2Fcluster%2Fjobs: '\
+ 'got Expected error.' == error
+
+
+def test_command_directory_present_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ assert 'Error: either path or command_directory_name is required' in create_and_apply(my_module, DEFAULT_ARGS, fail=True)['msg']
+
+
+def test_warnings_additional_commands_added_after_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ('POST', 'security/roles', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['user_role_volume'])
+ ])
+ args = {'privileges': [{'path': 'volume create', 'access': 'all'}]}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert_warning_was_raised("Create operation also affected additional related commands", partial_match=True)
+
+
+def test_warnings_create_required_after_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['user_role_volume']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_volume_privileges']),
+ ('DELETE', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/volume modify', SRR['empty_good']),
+ ('GET', 'security/roles', SRR['empty_records']),
+ ])
+ args = {'privileges': [{'path': 'volume create', 'access': 'readonly'}]}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert_warning_was_raised("Create role is required", partial_match=True)
+
+
+def test_warnings_modify_required_after_original_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'security/roles', SRR['user_role_volume']),
+ ('GET', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges', SRR['user_role_volume_privileges']),
+ ('DELETE', 'security/roles/02c9e252-41be-11e9-81d5-00a0986138f7/admin/privileges/volume modify', SRR['error_4']),
+ ('GET', 'security/roles', SRR['user_role_vserver']),
+ ])
+ args = {'privileges': [{'path': 'volume create', 'access': 'readonly'}]}
+ assert create_and_apply(my_module, DEFAULT_ARGS, args)['changed']
+ assert_warning_was_raised("modify is required, desired", partial_match=True)
+
+
+def test_error_with_legacy_commands_9_10_1():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1'])
+ ])
+ args = {'privileges': [{'path': 'volume create', 'access': 'readonly'}]}
+ assert "Error: Invalid URI ['volume create']" in create_module(my_module, DEFAULT_ARGS, args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume.py
new file mode 100644
index 000000000..3161ead04
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume.py
@@ -0,0 +1,2011 @@
+# (c) 2018-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_volume '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import copy
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ assert_warning_was_raised, call_main, create_module, create_and_apply, expect_and_capture_ansible_exception, patch_ansible, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import \
+ get_mock_record, patch_request_and_invoke, print_requests, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_error, build_zapi_response, zapi_error_message, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume \
+ import NetAppOntapVolume as vol_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+}
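+# Each SRR entry is a (status_code, json_body, error_message) tuple, matching the
+# values returned by the mocked send_request().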
+
+MOCK_VOL = {
+ 'name': 'test_vol',
+ 'aggregate': 'test_aggr',
+ 'junction_path': '/test',
+ 'vserver': 'test_vserver',
+ 'size': 20971520,
+ 'unix_permissions': '755',
+ 'user_id': 100,
+ 'group_id': 1000,
+ 'snapshot_policy': 'default',
+ 'qos_policy_group': 'performance',
+ 'qos_adaptive_policy_group': 'performance',
+ 'percent_snapshot_space': 60,
+ 'language': 'en',
+ 'vserver_dr_protection': 'unprotected',
+ 'uuid': 'UUID'
+}
+
+
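+# volume_info() builds the body of a ZAPI volume-get-iter response from MOCK_VOL;
+# remove_keys drops selected fields (currently only 'is_online') to simulate
+# incomplete answers from ONTAP.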
+def volume_info(style, vol_details=None, remove_keys=None, encrypt='false'):
+ if not vol_details:
+ vol_details = MOCK_VOL
+ info = copy.deepcopy({
+ 'num-records': 1,
+ 'attributes-list': {
+ 'volume-attributes': {
+ 'encrypt': encrypt,
+ 'volume-id-attributes': {
+ 'aggr-list': vol_details['aggregate'],
+ 'containing-aggregate-name': vol_details['aggregate'],
+ 'flexgroup-uuid': 'uuid',
+ 'junction-path': vol_details['junction_path'],
+ 'style-extended': style,
+ 'type': 'rw'
+ },
+ 'volume-comp-aggr-attributes': {
+ 'tiering-policy': 'snapshot-only'
+ },
+ 'volume-language-attributes': {
+ 'language-code': 'en'
+ },
+ 'volume-export-attributes': {
+ 'policy': 'default'
+ },
+ 'volume-performance-attributes': {
+ 'is-atime-update-enabled': 'true'
+ },
+ 'volume-state-attributes': {
+ 'state': "online",
+ 'is-nvfail-enabled': 'true'
+ },
+ 'volume-inode-attributes': {
+ 'files-total': '2000',
+ },
+ 'volume-space-attributes': {
+ 'space-guarantee': 'none',
+ 'size': vol_details['size'],
+ 'percentage-snapshot-reserve': vol_details['percent_snapshot_space'],
+ 'space-slo': 'thick'
+ },
+ 'volume-snapshot-attributes': {
+ 'snapshot-policy': vol_details['snapshot_policy']
+ },
+ 'volume-security-attributes': {
+ 'volume-security-unix-attributes': {
+ 'permissions': vol_details['unix_permissions'],
+ 'group-id': vol_details['group_id'],
+ 'user-id': vol_details['user_id']
+ },
+ 'style': 'unix',
+ },
+ 'volume-vserver-dr-protection-attributes': {
+ 'vserver-dr-protection': vol_details['vserver_dr_protection'],
+ },
+ 'volume-qos-attributes': {
+ 'policy-group-name': vol_details['qos_policy_group'],
+ 'adaptive-policy-group-name': vol_details['qos_adaptive_policy_group']
+ },
+ 'volume-snapshot-autodelete-attributes': {
+ 'commitment': 'try',
+ 'is-autodelete-enabled': 'true',
+ }
+ }
+ }
+ })
+ if remove_keys:
+ for key in remove_keys:
+ if key == 'is_online':
+ del info['attributes-list']['volume-attributes']['volume-state-attributes']['state']
+ else:
+ raise KeyError('unexpected key %s' % key)
+ return info
+
+
+def vol_encryption_conversion_status(status):
+ return {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'volume-encryption-conversion-info': {
+ 'status': status
+ }
+ }
+ }
+
+
+def vol_move_status(status):
+ return {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'volume-move-info': {
+ 'state': status,
+ 'details': 'some info'
+ }
+ }
+ }
+
+
+def job_info(state, error):
+ return {
+ 'num-records': 1,
+ 'attributes': {
+ 'job-info': {
+ 'job-state': state,
+ 'job-progress': 'progress',
+ 'job-completion': error,
+ }
+ }
+ }
+
+
+def results_info(status):
+ return {
+ 'result-status': status,
+ 'result-jobid': 'job12345',
+ }
+
+
+def modify_async_results_info(status, error=None):
+ list_name = 'failure-list' if error else 'success-list'
+ info = {
+ list_name: {
+ 'volume-modify-iter-async-info': {
+ 'status': status,
+ 'jobid': '1234'
+ }
+ }
+ }
+ if error:
+ info[list_name]['volume-modify-iter-async-info']['error-message'] = error
+ return info
+
+
+def sis_info():
+ return {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'sis-status-info': {
+ 'policy': 'test',
+ 'is-compression-enabled': 'true',
+ 'sis-status-completion': 'false',
+ }
+ }
+ }
+
+
+ZRR = zapi_responses({
+ 'get_flexgroup': build_zapi_response(volume_info('flexgroup')),
+ 'get_flexvol': build_zapi_response(volume_info('flexvol')),
+ 'get_flexvol_encrypted': build_zapi_response(volume_info('flexvol', encrypt='true')),
+ 'get_flexvol_no_online_key': build_zapi_response(volume_info('flexvol', remove_keys=['is_online'])),
+ 'job_failure': build_zapi_response(job_info('failure', 'failure')),
+ 'job_other': build_zapi_response(job_info('other', 'other_error')),
+ 'job_running': build_zapi_response(job_info('running', None)),
+ 'job_success': build_zapi_response(job_info('success', None)),
+ 'job_time_out': build_zapi_response(job_info('running', 'time_out')),
+ 'job_no_completion': build_zapi_response(job_info('failure', None)),
+ 'async_results': build_zapi_response(results_info('in_progress')),
+ 'failed_results': build_zapi_response(results_info('failed')),
+ 'modify_async_result_success': build_zapi_response(modify_async_results_info('in_progress')),
+ 'modify_async_result_failure': build_zapi_response(modify_async_results_info('failure', 'error_in_modify')),
+ 'vol_encryption_conversion_status_running': build_zapi_response(vol_encryption_conversion_status('running')),
+ 'vol_encryption_conversion_status_idle': build_zapi_response(vol_encryption_conversion_status('Not currently going on.')),
+ 'vol_encryption_conversion_status_error': build_zapi_response(vol_encryption_conversion_status('other')),
+ 'vol_move_status_running': build_zapi_response(vol_move_status('healthy')),
+ 'vol_move_status_idle': build_zapi_response(vol_move_status('done')),
+ 'vol_move_status_error': build_zapi_response(vol_move_status('failed')),
+ 'insufficient_privileges': build_zapi_error(12346, 'Insufficient privileges: user USERID does not have read access to this resource'),
+ 'get_sis_info': build_zapi_response(sis_info()),
+ 'error_15661': build_zapi_error(15661, 'force job not found error'),
+ 'error_tiering_94': build_zapi_error(94, 'volume-comp-aggr-attributes')
+})
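+# ZRR maps scenario names to canned ZAPI responses; zapi_responses() also supplies the
+# generic 'success', 'error', 'no_records' and 'empty' entries used by the tests below.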
+
+
+MINIMUM_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': 'test_vol',
+ 'vserver': 'test_vserver',
+ 'use_rest': 'never'
+}
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': 'test_vol',
+ 'vserver': 'test_vserver',
+ 'policy': 'default',
+ 'language': 'en',
+ 'is_online': True,
+ 'unix_permissions': '---rwxr-xr-x',
+ 'user_id': 100,
+ 'group_id': 1000,
+ 'snapshot_policy': 'default',
+ 'qos_policy_group': 'performance',
+ 'qos_adaptive_policy_group': 'performance',
+ 'size': 20,
+ 'size_unit': 'mb',
+ 'junction_path': '/test',
+ 'percent_snapshot_space': 60,
+ 'type': 'rw',
+ 'nvfail_enabled': True,
+ 'space_slo': 'thick',
+ 'use_rest': 'never'
+}
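+# MINIMUM_ARGS carries only the required connection options; DEFAULT_ARGS adds a
+# representative set of volume options. 'use_rest': 'never' keeps these tests on the
+# ZAPI code path.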
+
+
+ZAPI_ERROR = 'NetApp API failed. Reason - 12345:synthetic error for UT purpose'
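+# ZAPI_ERROR is the message produced by the generic ZRR['error'] canned response.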
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ error = create_module(vol_module, {}, fail=True)
+ print('Info: %s' % error['msg'])
+ assert 'missing required arguments:' in error['msg']
+
+
+def test_get_nonexistent_volume():
+ ''' Test if get_volume returns None for non-existent volume '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['success']),
+ ])
+ assert create_module(vol_module, DEFAULT_ARGS).get_volume() is None
+
+
+def test_get_error():
+ ''' Test if get_volume handles error '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['error']),
+ ])
+ error = 'Error fetching volume test_vol : %s' % ZAPI_ERROR
+ assert expect_and_capture_ansible_exception(create_module(vol_module, DEFAULT_ARGS).get_volume, 'fail')['msg'] == error
+
+
+def test_get_existing_volume():
+ ''' Test if get_volume returns details for existing volume '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ volume_info = create_module(vol_module, DEFAULT_ARGS).get_volume()
+ assert volume_info is not None
+ assert 'aggregate_name' in volume_info
+
+
+def test_create_error_missing_param():
+ ''' Test if create throws an error if aggregate_name is not specified'''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'size': 20,
+ 'encrypt': True,
+ }
+ msg = 'Error provisioning volume test_vol: aggregate_name is required'
+ assert msg == create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-create', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'aggregate_name': MOCK_VOL['aggregate'],
+ 'size': 20,
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_successful_create_with_completion(dont_sleep):
+ ''' Test successful create with wait_for_completion '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-create', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']), # wait for online
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'aggregate_name': MOCK_VOL['aggregate'],
+ 'size': 20,
+ 'wait_for_completion': True
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_error_timeout_create_with_completion(dont_sleep):
+ ''' Test timeout error while waiting for the volume to come online '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-create', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ])
+ module_args = {
+ 'aggregate_name': MOCK_VOL['aggregate'],
+ 'size': 20,
+ 'time_out': 42,
+ 'wait_for_completion': True
+ }
+ error = "Error waiting for volume test_vol to come online: ['Timeout after 42 seconds']"
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+@patch('time.sleep')
+def test_error_timeout_keyerror_create_with_completion(dont_sleep):
+ ''' Test timeout error with a KeyError while waiting for the volume to come online '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-create', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol_no_online_key']), # wait for online
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']), # wait for online
+ ])
+ module_args = {
+ 'aggregate_name': MOCK_VOL['aggregate'],
+ 'size': 20,
+ 'time_out': 42,
+ 'wait_for_completion': True
+ }
+ error_py3x = '''Error waiting for volume test_vol to come online: ["KeyError('is_online')", 'Timeout after 42 seconds']'''
+ error_py27 = '''Error waiting for volume test_vol to come online: ["KeyError('is_online',)", 'Timeout after 42 seconds']'''
+ error = create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('error', error)
+ assert error == error_py3x or error == error_py27
+
+
+def test_error_create():
+ ''' Test error on create '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-create', ZRR['error']),
+ ])
+ module_args = {
+ 'aggregate_name': MOCK_VOL['aggregate'],
+ 'size': 20,
+ 'encrypt': True,
+ }
+ error = 'Error provisioning volume test_vol of size 20971520: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_create_idempotency():
+ ''' Test create idempotency '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ assert not create_and_apply(vol_module, DEFAULT_ARGS)['changed']
+
+
+def test_successful_delete():
+ ''' Test delete existing volume '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-destroy', ZRR['success']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_delete():
+ ''' Test error deleting existing volume '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-destroy', ZRR['error']),
+ ('ZAPI', 'volume-destroy', ZRR['error']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ error = 'Error deleting volume test_vol:'
+ msg = create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+ error = 'volume delete failed with unmount-and-offline option: %s' % ZAPI_ERROR
+ assert error in msg
+ error = 'volume delete failed without unmount-and-offline option: %s' % ZAPI_ERROR
+ assert error in msg
+
+
+def test_error_delete_async():
+ ''' Test error deleting existing FlexGroup volume (async ZAPIs) '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexgroup']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-unmount', ZRR['error']),
+ ('ZAPI', 'volume-offline-async', ZRR['error']),
+ ('ZAPI', 'volume-destroy-async', ZRR['error']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ error = 'Error deleting volume test_vol: %s' % ZAPI_ERROR
+ msg = create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+ error = 'Error unmounting volume test_vol: %s' % ZAPI_ERROR
+ assert error in msg
+ error = 'Error changing the state of volume test_vol to offline: %s' % ZAPI_ERROR
+ assert error in msg
+
+
+def test_delete_idempotency():
+ ''' Test delete idempotency '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ assert not create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify_size():
+ ''' Test successful modify size '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-size', ZRR['success']),
+ ])
+ module_args = {
+ 'size': 200,
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<new-size>209715200', 2)
+
+
+def test_modify_idempotency():
+ ''' Test modify idempotency '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ assert not create_and_apply(vol_module, DEFAULT_ARGS)['changed']
+
+
+def test_modify_error():
+ ''' Test error on modify '''
+ register_responses([
+ ('ZAPI', 'volume-modify-iter', ZRR['error']),
+ ])
+ msg = 'Error modifying volume test_vol: %s' % ZAPI_ERROR
+ assert msg == expect_and_capture_ansible_exception(create_module(vol_module, DEFAULT_ARGS).volume_modify_attributes, 'fail', {})['msg']
+
+
+def test_mount_volume():
+ ''' Test mount volume '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-mount', ZRR['success']),
+ ])
+ module_args = {
+ 'junction_path': '/test123',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_mount_volume():
+ ''' Test error mounting volume '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-mount', ZRR['error']),
+ ])
+ module_args = {
+ 'junction_path': '/test123',
+ }
+ error = 'Error mounting volume test_vol on path /test123: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_unmount_volume():
+ ''' Test unmount volume '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-unmount', ZRR['success']),
+ ])
+ module_args = {
+ 'junction_path': '',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_unmount_volume():
+ ''' Test error unmounting volume '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-unmount', ZRR['error']),
+ ])
+ module_args = {
+ 'junction_path': '',
+ }
+ error = 'Error unmounting volume test_vol: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_successful_modify_space():
+ ''' Test successful modify space '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ args = dict(DEFAULT_ARGS)
+ del args['space_slo']
+ module_args = {
+ 'space_guarantee': 'volume',
+ }
+ assert create_and_apply(vol_module, args, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<space-guarantee>volume', 2)
+
+
+def test_successful_modify_unix_permissions():
+ ''' Test successful modify unix_permissions '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'unix_permissions': '---rw-r-xr-x',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<permissions>---rw-r-xr-x', 2)
+
+
+def test_successful_modify_volume_security_style():
+ ''' Test successful modify volume_security_style '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'volume_security_style': 'mixed',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<style>mixed</style>', 2)
+
+
+def test_successful_modify_max_files_and_encrypt():
+ ''' Test successful modify max_files and encrypt '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ('ZAPI', 'volume-encryption-conversion-start', ZRR['success']),
+ ])
+ module_args = {
+ 'encrypt': True,
+ 'max_files': '3000',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<files-total>3000', 2)
+
+
+def test_successful_modify_snapshot_policy():
+ ''' Test successful modify snapshot_policy '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'snapshot_policy': 'default-1weekly',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<snapshot-policy>default-1weekly', 2)
+
+
+def test_successful_modify_efficiency_policy():
+ ''' Test successful modify efficiency_policy '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'sis-enable', ZRR['success']),
+ ('ZAPI', 'sis-set-config', ZRR['success']),
+ ])
+ module_args = {
+ 'efficiency_policy': 'test',
+ 'inline_compression': True
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<policy-name>test', 3)
+
+
+def test_successful_modify_efficiency_policy_idempotent():
+ ''' Test modify efficiency_policy idempotency '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['get_sis_info']),
+ ])
+ module_args = {
+ 'efficiency_policy': 'test',
+ 'compression': True
+ }
+ assert not create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify_efficiency_policy_async():
+ ''' Test successful modify efficiency_policy '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexgroup']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'sis-enable-async', ZRR['success']),
+ ('ZAPI', 'sis-set-config-async', ZRR['success']),
+ ])
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'efficiency_policy': 'test',
+ 'compression': True,
+ 'wait_for_completion': True,
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<policy-name>test', 3)
+
+
+def test_error_set_efficiency_policy():
+ register_responses([
+ ('ZAPI', 'sis-enable', ZRR['error']),
+ ])
+ module_args = {'efficiency_policy': 'test_policy'}
+ msg = 'Error enable efficiency on volume test_vol: %s' % ZAPI_ERROR
+ assert msg == expect_and_capture_ansible_exception(create_module(vol_module, MINIMUM_ARGS, module_args).set_efficiency_config, 'fail')['msg']
+
+
+def test_error_modify_efficiency_policy():
+ ''' Test error modify efficiency_policy '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'sis-enable', ZRR['success']),
+ ('ZAPI', 'sis-set-config', ZRR['error']),
+ ])
+ module_args = {
+ 'efficiency_policy': 'test',
+ }
+ error = 'Error setting up efficiency attributes on volume test_vol: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_error_set_efficiency_policy_async():
+ register_responses([
+ ('ZAPI', 'sis-enable-async', ZRR['error']),
+ ])
+ module_args = {'efficiency_policy': 'test_policy'}
+ msg = 'Error enable efficiency on volume test_vol: %s' % ZAPI_ERROR
+ assert msg == expect_and_capture_ansible_exception(create_module(vol_module, MINIMUM_ARGS, module_args).set_efficiency_config_async, 'fail')['msg']
+
+
+def test_error_modify_efficiency_policy_async():
+ ''' Test error modify efficiency_policy '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexgroup']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'sis-enable-async', ZRR['success']),
+ ('ZAPI', 'sis-set-config-async', ZRR['error']),
+ ])
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'efficiency_policy': 'test',
+ }
+ error = 'Error setting up efficiency attributes on volume test_vol: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_successful_modify_percent_snapshot_space():
+ ''' Test successful modify percent_snapshot_space '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'percent_snapshot_space': 90,
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<percentage-snapshot-reserve>90', 2)
+
+
+def test_successful_modify_qos_policy_group():
+ ''' Test successful modify qos_policy_group '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'qos_policy_group': 'extreme',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<policy-group-name>extreme', 2)
+
+
+def test_successful_modify_qos_adaptive_policy_group():
+ ''' Test successful modify qos_adaptive_policy_group '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {
+ 'qos_adaptive_policy_group': 'extreme',
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<adaptive-policy-group-name>extreme', 2)
+
+
+def test_successful_move():
+ ''' Test successful volume move to another aggregate '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-move-start', ZRR['success']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['vol_move_status_idle']),
+ ])
+ module_args = {
+ 'aggregate_name': 'different_aggr',
+ 'cutover_action': 'abort_on_failure',
+ 'encrypt': True,
+ 'wait_for_completion': True
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_unencrypt_volume():
+ ''' Test unencrypting a volume by moving it with encrypt: false '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol_encrypted']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-move-start', ZRR['success']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['vol_move_status_idle']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol_encrypted']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-move-start', ZRR['success']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['vol_move_status_idle']),
+ ])
+ # without aggregate
+ module_args = {
+ 'encrypt': False,
+ 'wait_for_completion': True
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ # with aggregate.
+ module_args['aggregate_name'] = 'test_aggr'
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_move():
+ ''' Test error on volume move '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-move-start', ZRR['error']),
+ ])
+ module_args = {
+ 'aggregate_name': 'different_aggr',
+ }
+ error = 'Error moving volume test_vol: %s - Retry failed with REST error: False' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
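+# setup_rename() returns (module_args, current): current mimics a get_volume() result
+# so the rename tests can feed it through a patched get_volume.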
+def setup_rename(is_isinfinite=None):
+ module_args = {
+ 'from_name': MOCK_VOL['name'],
+ 'name': 'new_name',
+ 'time_out': 20
+ }
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'uuid': MOCK_VOL['uuid'],
+ 'vserver': MOCK_VOL['vserver'],
+ }
+ if is_isinfinite is not None:
+ module_args['is_infinite'] = is_isinfinite
+ current['is_infinite'] = is_isinfinite
+ return module_args, current
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_rename(get_volume):
+ ''' Test successful rename volume '''
+ register_responses([
+ ('ZAPI', 'volume-rename', ZRR['success']),
+ ])
+ module_args, current = setup_rename()
+ get_volume.side_effect = [
+ None,
+ current
+ ]
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_error_rename(get_volume):
+ ''' Test error rename volume '''
+ register_responses([
+ ('ZAPI', 'volume-rename', ZRR['error']),
+ ])
+ module_args, current = setup_rename()
+ get_volume.side_effect = [
+ None,
+ current
+ ]
+ error = 'Error renaming volume new_name: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_error_rename_no_from(get_volume):
+ ''' Test error renaming volume when the from_name volume does not exist '''
+ register_responses([
+ ])
+ module_args, current = setup_rename()
+ get_volume.side_effect = [
+ None,
+ None
+ ]
+ error = 'Error renaming volume: cannot find %s' % MOCK_VOL['name']
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_rename_async(get_volume):
+ ''' Test successful rename volume '''
+ register_responses([
+ ('ZAPI', 'volume-rename-async', ZRR['success']),
+ ])
+ module_args, current = setup_rename(is_isinfinite=True)
+ get_volume.side_effect = [
+ None,
+ current
+ ]
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_helper():
+ register_responses([
+ ('ZAPI', 'volume-unmount', ZRR['success']),
+ ('ZAPI', 'volume-offline', ZRR['success']),
+ ])
+ module_args = {'is_online': False}
+ modify = {'is_online': False}
+ assert create_module(vol_module, DEFAULT_ARGS, module_args).take_modify_actions(modify) is None
+
+
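+# The compare_chmod_value tests exercise the shared na_helper: a symbolic mode such as
+# '---rwxr-xr-x' is treated as equivalent to its octal form '755'.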
+def test_compare_chmod_value_true_1():
+ module_args = {'unix_permissions': '------------'}
+ current = {
+ 'unix_permissions': '0'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_true_2():
+ module_args = {'unix_permissions': '---rwxrwxrwx'}
+ current = {
+ 'unix_permissions': '777'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_true_3():
+ module_args = {'unix_permissions': '---rwxr-xr-x'}
+ current = {
+ 'unix_permissions': '755'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_true_4():
+ module_args = {'unix_permissions': '755'}
+ current = {
+ 'unix_permissions': '755'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_false_1():
+ module_args = {'unix_permissions': '---rwxrwxrwx'}
+ current = {
+ 'unix_permissions': '0'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert not vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_false_2():
+ module_args = {'unix_permissions': '---rwxrwxrwx'}
+ current = None
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert not vol_obj.na_helper.compare_chmod_value(current, module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_invalid_input_1():
+ module_args = {'unix_permissions': '---xwrxwrxwr'}
+ current = {
+ 'unix_permissions': '777'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert not vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_invalid_input_2():
+ module_args = {'unix_permissions': '---rwx-wx--a'}
+ current = {
+ 'unix_permissions': '0'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert not vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_invalid_input_3():
+ module_args = {'unix_permissions': '---'}
+ current = {
+ 'unix_permissions': '0'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert not vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_compare_chmod_value_invalid_input_4():
+ module_args = {'unix_permissions': 'rwx---rwxrwx'}
+ current = {
+ 'unix_permissions': '0'
+ }
+ vol_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert not vol_obj.na_helper.compare_chmod_value(current['unix_permissions'], module_args['unix_permissions'])
+
+
+def test_successful_create_flex_group_manually():
+ ''' Test successful create flexGroup manually '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['empty']),
+ ('ZAPI', 'volume-create-async', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexgroup']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter-async', ZRR['modify_async_result_success']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ])
+ args = copy.deepcopy(DEFAULT_ARGS)
+ del args['space_slo']
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'space_guarantee': 'file',
+ 'time_out': 20
+ }
+ assert create_and_apply(vol_module, args, module_args)['changed']
+
+
+def test_error_create_flex_group_manually():
+ ''' Test error create flexGroup manually '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['empty']),
+ ('ZAPI', 'volume-create-async', ZRR['error']),
+ ])
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'time_out': 20
+ }
+ error = 'Error provisioning volume test_vol of size 20971520: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_partial_error_create_flex_group_manually():
+ ''' Test partial failure: flexGroup is created but the follow-up modify fails '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('ZAPI', 'volume-get-iter', ZRR['empty']),
+ ('ZAPI', 'volume-create-async', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexgroup']),
+ ('ZAPI', 'sis-get-iter', ZRR['insufficient_privileges']), # ignored but raises a warning
+ ('ZAPI', 'volume-modify-iter-async', ZRR['modify_async_result_failure']),
+ ])
+ args = copy.deepcopy(DEFAULT_ARGS)
+ del args['space_slo']
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'space_guarantee': 'file',
+ 'time_out': 20,
+ 'use_rest': 'auto'
+ }
+ error = 'Volume created with success, with missing attributes: Error modifying volume test_vol: error_in_modify'
+ assert create_and_apply(vol_module, args, module_args, fail=True)['msg'] == error
+ print_warnings()
+ assert_warning_was_raised('cannot read volume efficiency options (as expected when running as vserver): '
+ 'NetApp API failed. Reason - 12346:Insufficient privileges: user USERID does not have read access to this resource')
+
+
+def test_successful_create_flex_group_auto_provision():
+ ''' Test successful create flexGroup auto provision '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['empty']),
+ ('ZAPI', 'volume-create-async', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexgroup']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ module_args = {
+ 'auto_provision_as': 'flexgroup',
+ 'time_out': 20
+ }
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_delete_flex_group(get_volume):
+ ''' Test successful delete flexGroup '''
+ register_responses([
+ ('ZAPI', 'volume-unmount', ZRR['success']),
+ ('ZAPI', 'volume-offline-async', ZRR['job_success']),
+ ('ZAPI', 'volume-destroy-async', ZRR['job_success']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ }
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'unix_permissions': '755',
+ 'is_online': True,
+ 'uuid': 'uuid'
+ }
+ get_volume.return_value = current
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def setup_resize():
+ module_args = {
+ 'size': 400,
+ 'size_unit': 'mb'
+ }
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'size': 20971520,
+ 'unix_permissions': '755',
+ 'uuid': '1234'
+ }
+ return module_args, current
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_resize_flex_group(get_volume):
+ ''' Test successful resize of flexGroup '''
+ register_responses([
+ ('ZAPI', 'volume-size-async', ZRR['job_success']),
+ ])
+ module_args, current = setup_resize()
+ get_volume.return_value = current
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_error_resize_flex_group(get_volume):
+ ''' Test error resizing flexGroup '''
+ register_responses([
+ ('ZAPI', 'volume-size-async', ZRR['error']),
+ ])
+ module_args, current = setup_resize()
+ get_volume.return_value = current
+ error = 'Error re-sizing volume test_vol: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.check_job_status')
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_modify_unix_permissions_flex_group(get_volume, check_job_status):
+ ''' Test successful modify unix permissions flexGroup '''
+ register_responses([
+ ('ZAPI', 'volume-modify-iter-async', ZRR['modify_async_result_success']),
+ ])
+ module_args = {
+ 'unix_permissions': '---rw-r-xr-x',
+ 'time_out': 20
+ }
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'unix_permissions': '777',
+ 'uuid': '1234'
+ }
+ get_volume.return_value = current
+ check_job_status.return_value = None
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_modify_unix_permissions_flex_group_0_time_out(get_volume):
+ ''' Test successful modify unix permissions flexGroup '''
+ register_responses([
+ ('ZAPI', 'volume-modify-iter-async', ZRR['modify_async_result_success']),
+ ])
+ module_args = {
+ 'unix_permissions': '---rw-r-xr-x',
+ 'time_out': 0
+ }
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'unix_permissions': '777',
+ 'uuid': '1234'
+ }
+ get_volume.return_value = current
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_modify_unix_permissions_flex_group_0_missing_result(get_volume):
+ ''' Test modify unix permissions flexGroup when the async result is missing '''
+ register_responses([
+ ('ZAPI', 'volume-modify-iter-async', ZRR['job_running']), # bad response
+ ])
+ module_args = {
+ 'unix_permissions': '---rw-r-xr-x',
+ 'time_out': 0
+ }
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'unix_permissions': '777',
+ 'uuid': '1234'
+ }
+ get_volume.return_value = current
+ # check_job_status.side_effect = ['job_error']
+ error = create_and_apply(vol_module, DEFAULT_ARGS, module_args, 'fail')
+ assert error['msg'].startswith('Unexpected error when modifying volume: result is:')
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.check_job_status')
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_error_modify_unix_permissions_flex_group(get_volume, check_job_status):
+ ''' Test error modify unix permissions flexGroup '''
+ register_responses([
+ ('ZAPI', 'volume-modify-iter-async', ZRR['modify_async_result_success']),
+ ])
+ module_args = {
+ 'unix_permissions': '---rw-r-xr-x',
+ 'time_out': 20
+ }
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'unix_permissions': '777',
+ 'uuid': '1234'
+ }
+ get_volume.return_value = current
+ check_job_status.side_effect = ['job_error']
+ error = create_and_apply(vol_module, DEFAULT_ARGS, module_args, 'fail')
+ assert error['msg'] == 'Error when modifying volume: job_error'
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_failure_modify_unix_permissions_flex_group(get_volume):
+ ''' Test failure modify unix permissions flexGroup '''
+ register_responses([
+ ('ZAPI', 'volume-modify-iter-async', ZRR['modify_async_result_failure']),
+ ])
+ module_args = {
+ 'unix_permissions': '---rw-r-xr-x',
+ 'time_out': 20
+ }
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'unix_permissions': '777',
+ 'uuid': '1234'
+ }
+ get_volume.return_value = current
+ error = create_and_apply(vol_module, DEFAULT_ARGS, module_args, 'fail')
+ assert error['msg'] == 'Error modifying volume test_vol: error_in_modify'
+
+
+def setup_offline_state():
+ module_args = {'is_online': False}
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'is_online': True,
+ 'junction_path': '/test',
+ 'unix_permissions': '755',
+ 'uuid': '1234'
+ }
+ return module_args, current
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_offline_state_flex_group(get_volume):
+ ''' Test successful offline flexGroup state '''
+ register_responses([
+ ('ZAPI', 'volume-unmount', ZRR['success']),
+ ('ZAPI', 'volume-offline-async', ZRR['async_results']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ])
+ module_args, current = setup_offline_state()
+ get_volume.return_value = current
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_error_offline_state_flex_group(get_volume):
+ ''' Test error offline flexGroup state '''
+ register_responses([
+ ('ZAPI', 'volume-unmount', ZRR['success']),
+ ('ZAPI', 'volume-offline-async', ZRR['error']),
+ ])
+ module_args, current = setup_offline_state()
+ get_volume.return_value = current
+ error = 'Error changing the state of volume test_vol to offline: %s' % ZAPI_ERROR
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_error_unmounting_offline_state_flex_group(get_volume):
+    ''' Test unmount error when taking a flexGroup offline '''
+ register_responses([
+ ('ZAPI', 'volume-unmount', ZRR['error']),
+ ('ZAPI', 'volume-offline-async', ZRR['error']),
+ ])
+ module_args, current = setup_offline_state()
+ get_volume.return_value = current
+ error = 'Error changing the state of volume test_vol to offline: %s' % ZAPI_ERROR
+ msg = create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert error in msg
+    error = 'Error unmounting volume test_vol: %s' % ZAPI_ERROR
+    assert error in msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_online_state_flex_group(get_volume):
+ ''' Test successful online flexGroup state '''
+ register_responses([
+ ('ZAPI', 'volume-online-async', ZRR['async_results']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ('ZAPI', 'volume-modify-iter-async', ZRR['modify_async_result_success']),
+ ('ZAPI', 'job-get', ZRR['job_success']),
+ ('ZAPI', 'volume-mount', ZRR['success']),
+ ])
+ current = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'style_extended': 'flexgroup',
+ 'is_online': False,
+ 'junction_path': 'anything',
+ 'unix_permissions': '755',
+ 'uuid': '1234'
+ }
+ get_volume.return_value = current
+ assert create_and_apply(vol_module, DEFAULT_ARGS)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<group-id>', 2)
+ assert get_mock_record().is_text_in_zapi_request('<user-id>', 2)
+ assert get_mock_record().is_text_in_zapi_request('<percentage-snapshot-reserve>', 2)
+ assert get_mock_record().is_text_in_zapi_request('<junction-path>/test</junction-path>', 4)
+
+
+def test_check_job_status_error():
+ ''' Test check job status error '''
+ register_responses([
+ ('ZAPI', 'job-get', ZRR['error']),
+ ])
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'time_out': 0
+ }
+ error = 'Error fetching job info: %s' % ZAPI_ERROR
+ assert expect_and_capture_ansible_exception(create_module(vol_module, MINIMUM_ARGS, module_args).check_job_status, 'fail', '123')['msg'] == error
+
+
+@patch('time.sleep')
+def test_check_job_status_not_found(skip_sleep):
+    ''' Test check job status when the job cannot be found '''
+ register_responses([
+ ('ZAPI', 'job-get', ZRR['error_15661']),
+ ('ZAPI', 'vserver-get-iter', ZRR['no_records']),
+ ('ZAPI', 'job-get', ZRR['error_15661']),
+ ])
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'time_out': 50
+ }
+ error = 'cannot locate job with id: 123'
+ assert create_module(vol_module, MINIMUM_ARGS, module_args).check_job_status('123') == error
+
+
+@patch('time.sleep')
+def test_check_job_status_failure(skip_sleep):
+    ''' Test check job status reporting a failure, then a missing completion status '''
+ register_responses([
+ ('ZAPI', 'job-get', ZRR['job_running']),
+ ('ZAPI', 'job-get', ZRR['job_running']),
+ ('ZAPI', 'job-get', ZRR['job_failure']),
+ ('ZAPI', 'job-get', ZRR['job_running']),
+ ('ZAPI', 'job-get', ZRR['job_running']),
+ ('ZAPI', 'job-get', ZRR['job_no_completion']),
+ ])
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'time_out': 20
+ }
+ msg = 'failure'
+ assert msg == create_module(vol_module, MINIMUM_ARGS, module_args).check_job_status('123')
+ msg = 'progress'
+ assert msg == create_module(vol_module, MINIMUM_ARGS, module_args).check_job_status('123')
+
+
+def test_check_job_status_time_out_is_0():
+ ''' Test check job status time out is 0'''
+ register_responses([
+ ('ZAPI', 'job-get', ZRR['job_time_out']),
+ ])
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'time_out': 0
+ }
+ msg = 'job completion exceeded expected timer of: 0 seconds'
+ assert msg == create_module(vol_module, MINIMUM_ARGS, module_args).check_job_status('123')
+
+
+def test_check_job_status_unexpected():
+ ''' Test check job status unexpected state '''
+ register_responses([
+ ('ZAPI', 'job-get', ZRR['job_other']),
+ ])
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'time_out': 20
+ }
+ msg = 'Unexpected job status in:'
+ assert msg in expect_and_capture_ansible_exception(create_module(vol_module, MINIMUM_ARGS, module_args).check_job_status, 'fail', '123')['msg']
+
+
+def test_successful_modify_tiering_policy():
+ ''' Test successful modify tiering policy '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {'tiering_policy': 'auto'}
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<tiering-policy>auto</tiering-policy>', 2)
+
+
+def test_error_modify_tiering_policy():
+    ''' Test error modifying tiering policy '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['error']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['error_tiering_94']),
+ ])
+ module_args = {'tiering_policy': 'auto'}
+ error = zapi_error_message('Error modifying volume test_vol')
+ assert error in create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ error = zapi_error_message('Error modifying volume test_vol', 94, 'volume-comp-aggr-attributes', '. Added info: tiering option requires 9.4 or later.')
+ assert error in create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_successful_modify_vserver_dr_protection():
+ ''' Test successful modify vserver_dr_protection '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {'vserver_dr_protection': 'protected'}
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<vserver-dr-protection>protected</vserver-dr-protection>', 2)
+
+
+def test_successful_group_id():
+ ''' Test successful modify group_id '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {'group_id': 1001}
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<group-id>1001</group-id>', 2)
+
+
+def test_successful_modify_user_id():
+ ''' Test successful modify user_id '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ])
+ module_args = {'user_id': 101}
+ assert create_and_apply(vol_module, DEFAULT_ARGS, module_args)['changed']
+ print_requests()
+ assert get_mock_record().is_text_in_zapi_request('<user-id>101</user-id>', 2)
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume.NetAppOntapVolume.get_volume')
+def test_successful_modify_snapshot_auto_delete(get_volume):
+    ''' Test successful modify of snapshot_auto_delete options '''
+ register_responses([
+ # One ZAPI call for each option!
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['success']),
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['success']),
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['success']),
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['success']),
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['success']),
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['success']),
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['success']),
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['success']),
+ ])
+ module_args = {
+ 'snapshot_auto_delete': {
+ 'delete_order': 'oldest_first', 'destroy_list': 'lun_clone,vol_clone',
+ 'target_free_space': 20, 'prefix': 'test', 'commitment': 'try',
+ 'state': 'on', 'trigger': 'snap_reserve', 'defer_delete': 'scheduled'}}
+ current = {
+ 'name': MOCK_VOL['name'],
+ 'vserver': MOCK_VOL['vserver'],
+ 'snapshot_auto_delete': {
+ 'delete_order': 'newest_first', 'destroy_list': 'lun_clone,vol_clone',
+ 'target_free_space': 30, 'prefix': 'test', 'commitment': 'try',
+ 'state': 'on', 'trigger': 'snap_reserve', 'defer_delete': 'scheduled'},
+ 'uuid': '1234'
+ }
+ get_volume.return_value = current
+ assert create_and_apply(vol_module, MINIMUM_ARGS, module_args)['changed']
+
+
+def test_error_modify_snapshot_auto_delete():
+ register_responses([
+ ('ZAPI', 'snapshot-autodelete-set-option', ZRR['error']),
+ ])
+ module_args = {'snapshot_auto_delete': {
+ 'delete_order': 'oldest_first', 'destroy_list': 'lun_clone,vol_clone',
+ 'target_free_space': 20, 'prefix': 'test', 'commitment': 'try',
+ 'state': 'on', 'trigger': 'snap_reserve', 'defer_delete': 'scheduled'}}
+ msg = 'Error setting snapshot auto delete options for volume test_vol: %s' % ZAPI_ERROR
+ assert msg == expect_and_capture_ansible_exception(create_module(vol_module, MINIMUM_ARGS, module_args).set_snapshot_auto_delete, 'fail')['msg']
+
+
+def test_successful_volume_rehost():
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-rehost', ZRR['success']),
+ ])
+ module_args = {
+ 'from_vserver': 'source_vserver',
+ 'auto_remap_luns': False,
+ }
+ assert create_and_apply(vol_module, MINIMUM_ARGS, module_args)['changed']
+
+
+def test_error_volume_rehost():
+ register_responses([
+ ('ZAPI', 'volume-rehost', ZRR['error']),
+ ])
+ module_args = {
+ 'from_vserver': 'source_vserver',
+ 'force_unmap_luns': False,
+ }
+ msg = 'Error rehosting volume test_vol: %s' % ZAPI_ERROR
+ assert msg == expect_and_capture_ansible_exception(create_module(vol_module, MINIMUM_ARGS, module_args).rehost_volume, 'fail')['msg']
+
+
+def test_successful_volume_restore():
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'snapshot-restore-volume', ZRR['success']),
+ ])
+ module_args = {
+ 'snapshot_restore': 'snapshot_copy',
+ 'force_restore': True,
+ 'preserve_lun_ids': True
+ }
+ assert create_and_apply(vol_module, MINIMUM_ARGS, module_args)['changed']
+
+
+def test_error_volume_restore():
+ register_responses([
+ ('ZAPI', 'snapshot-restore-volume', ZRR['error']),
+ ])
+ module_args = {'snapshot_restore': 'snapshot_copy'}
+ msg = 'Error restoring volume test_vol: %s' % ZAPI_ERROR
+ assert msg == expect_and_capture_ansible_exception(create_module(vol_module, MINIMUM_ARGS, module_args).snapshot_restore_volume, 'fail')['msg']
+
+
+def test_error_modify_flexvol_to_flexgroup():
+    ''' Changing a FlexVol to a FlexGroup is not allowed '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ module_args = {'auto_provision_as': 'flexgroup'}
+ msg = 'Error: changing a volume from one backend to another is not allowed. Current: flexvol, desired: flexgroup.'
+ assert msg == create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_error_modify_flexgroup_to_flexvol():
+ ''' Changing the style from flexgroup to flexvol is not allowed '''
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexgroup']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ module_args = {'aggregate_name': 'nothing'}
+ msg = 'Error: aggregate_name option cannot be used with FlexGroups.'
+ assert msg == create_and_apply(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_error_snaplock_not_supported_with_zapi():
+    ''' Test that the snaplock option is rejected with ZAPI '''
+ module_args = {'snaplock': {'retention': {'default': 'P30TM'}}}
+ msg = 'Error: snaplock option is not supported with ZAPI. It can only be used with REST. use_rest: never.'
+ assert msg == create_module(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_wait_for_task_completion_no_records():
+ register_responses([
+ ('ZAPI', 'results', ZRR['no_records']),
+ ])
+ # using response to build a request
+ zapi_iter, valid = build_zapi_response({'fake-iter': 'any'})
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ assert my_obj.wait_for_task_completion(zapi_iter, lambda: True) is None
+
+
+def test_wait_for_task_completion_one_response():
+ register_responses([
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ])
+ # using response to build a request
+ zapi_iter, valid = build_zapi_response({'fake-iter': 'any'})
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ assert my_obj.wait_for_task_completion(zapi_iter, lambda x: False) is None
+
+
+@patch('time.sleep')
+def test_wait_for_task_completion_loop(skip_sleep):
+ register_responses([
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ])
+
+ def check_state(x):
+ check_state.counter += 1
+ # True continues the wait loop
+ # False exits the loop
+ return (True, True, False)[check_state.counter - 1]
+
+ check_state.counter = 0
+
+ # using response to build a request
+ zapi_iter, valid = build_zapi_response({'fake-iter': 'any'})
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ assert my_obj.wait_for_task_completion(zapi_iter, check_state) is None
+
+
+@patch('time.sleep')
+def test_wait_for_task_completion_loop_with_recoverable_error(skip_sleep):
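+    # per the registered responses below, up to three consecutive polling errors are
+    # tolerated as long as the next poll succeeds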
+ register_responses([
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ])
+
+ def check_state(x):
+ check_state.counter += 1
+ return (True, True, False)[check_state.counter - 1]
+
+ check_state.counter = 0
+
+ # using response to build a request
+ zapi_iter, valid = build_zapi_response({'fake-iter': 'any'})
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ assert my_obj.wait_for_task_completion(zapi_iter, check_state) is None
+
+
+@patch('time.sleep')
+def test_wait_for_task_completion_loop_with_non_recoverable_error(skip_sleep):
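+    # a fourth consecutive polling error (see the last four responses) is treated as fatal
+    # and the last ZAPI error is returned to the caller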
+ register_responses([
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['one_record_no_data']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ('ZAPI', 'results', ZRR['error']),
+ ])
+
+ # using response to build a request
+ zapi_iter, valid = build_zapi_response({'fake-iter': 'any'})
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ assert str(my_obj.wait_for_task_completion(zapi_iter, lambda x: True)) == ZAPI_ERROR
+
+
+@patch('time.sleep')
+def test_start_encryption_conversion(skip_sleep):
+ register_responses([
+ ('ZAPI', 'volume-encryption-conversion-start', ZRR['success']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_running']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_running']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_idle']),
+ ])
+ module_args = {
+ 'wait_for_completion': True,
+ 'max_wait_time': 120
+ }
+ my_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert my_obj.start_encryption_conversion(True) is None
+
+
+@patch('time.sleep')
+def test_error_on_wait_for_start_encryption_conversion(skip_sleep):
+ register_responses([
+ ('ZAPI', 'volume-encryption-conversion-start', ZRR['success']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_running']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_running']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ])
+ module_args = {
+ 'wait_for_completion': True,
+ 'max_wait_time': 280
+ }
+ my_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.start_encryption_conversion, 'fail', True)['msg']
+ assert error == 'Error getting volume encryption_conversion status: %s' % ZAPI_ERROR
+
+
+def test_error_start_encryption_conversion():
+ register_responses([
+ ('ZAPI', 'volume-encryption-conversion-start', ZRR['error']),
+ ])
+ module_args = {
+ 'wait_for_completion': True
+ }
+ my_obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ error = expect_and_capture_ansible_exception(my_obj.start_encryption_conversion, 'fail', True)['msg']
+ assert error == 'Error enabling encryption for volume test_vol: %s' % ZAPI_ERROR
+
+
+@patch('time.sleep')
+def test_wait_for_volume_encryption_conversion_with_non_recoverable_error(skip_sleep):
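+    # same retry pattern as wait_for_task_completion: a fourth consecutive error aborts the wait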
+ register_responses([
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_running']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_running']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ])
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.wait_for_volume_encryption_conversion, 'fail')['msg']
+ assert error == 'Error getting volume encryption_conversion status: %s' % ZAPI_ERROR
+
+
+@patch('time.sleep')
+def test_wait_for_volume_encryption_conversion(skip_sleep):
+ register_responses([
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_running']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_idle']),
+ ])
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ assert my_obj.wait_for_volume_encryption_conversion() is None
+
+
+def test_wait_for_volume_encryption_conversion_bad_status():
+ register_responses([
+ ('ZAPI', 'volume-encryption-conversion-get-iter', ZRR['vol_encryption_conversion_status_error']),
+ ])
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.wait_for_volume_encryption_conversion, 'fail')['msg']
+ assert error == 'Error converting encryption for volume test_vol: other'
+
+
+@patch('time.sleep')
+def test_wait_for_volume_move_with_non_recoverable_error(skip_sleep):
+ register_responses([
+ ('ZAPI', 'volume-move-get-iter', ZRR['vol_move_status_running']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['vol_move_status_running']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ])
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.wait_for_volume_move, 'fail')['msg']
+ assert error == 'Error getting volume move status: %s' % ZAPI_ERROR
+
+
+@patch('time.sleep')
+def test_wait_for_volume_move(skip_sleep):
+ register_responses([
+ ('ZAPI', 'volume-move-get-iter', ZRR['vol_move_status_running']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['error']),
+ ('ZAPI', 'volume-move-get-iter', ZRR['vol_move_status_idle']),
+ ])
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ assert my_obj.wait_for_volume_move() is None
+
+
+def test_wait_for_volume_move_bad_status():
+ register_responses([
+ ('ZAPI', 'volume-move-get-iter', ZRR['vol_move_status_error']),
+ ])
+ my_obj = create_module(vol_module, DEFAULT_ARGS)
+ error = expect_and_capture_ansible_exception(my_obj.wait_for_volume_move, 'fail')['msg']
+ assert error == 'Error moving volume test_vol: some info'
+
+
+def test_error_validate_snapshot_auto_delete():
+ module_args = {
+ 'snapshot_auto_delete': {
+ 'commitment': 'whatever',
+ 'unknown': 'unexpected option'
+ }
+ }
+ error = "snapshot_auto_delete option 'unknown' is not valid."
+ assert create_module(vol_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_get_snapshot_auto_delete_attributes():
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexgroup']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ result = create_module(vol_module, DEFAULT_ARGS).get_volume()
+ assert 'snapshot_auto_delete' in result
+ assert 'is_autodelete_enabled' not in result['snapshot_auto_delete']
+ assert result['snapshot_auto_delete']['state'] == 'on'
+
+
+def test_error_on_get_efficiency_info():
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['error']),
+ ])
+ error = 'Error fetching efficiency policy for volume test_vol: %s' % ZAPI_ERROR
+ assert call_main(my_main, DEFAULT_ARGS, fail=True)['msg'] == error
+
+
+def test_create_volume_from_main():
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-create', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-modify-iter', ZRR['success']),
+ ('ZAPI', 'volume-unmount', ZRR['success']),
+ ('ZAPI', 'volume-offline', ZRR['success'])
+ ])
+ args = dict(DEFAULT_ARGS)
+ del args['space_slo']
+ module_args = {
+ 'aggregate_name': MOCK_VOL['aggregate'],
+ 'comment': 'some comment',
+ 'is_online': False,
+ 'space_guarantee': 'file',
+ 'tiering_policy': 'snapshot-only',
+ 'volume_security_style': 'unix',
+ 'vserver_dr_protection': 'unprotected',
+ }
+ assert call_main(my_main, args, module_args)['changed']
+
+
+def test_error_create_volume_change_in_type():
+ register_responses([
+ ('ZAPI', 'volume-get-iter', ZRR['no_records']),
+ ('ZAPI', 'volume-create', ZRR['success']),
+ ('ZAPI', 'volume-get-iter', ZRR['get_flexvol']),
+ ('ZAPI', 'sis-get-iter', ZRR['no_records']),
+ ])
+ args = dict(DEFAULT_ARGS)
+ module_args = {
+ 'aggregate_name': MOCK_VOL['aggregate'],
+ 'type': 'dp',
+ }
+ error = 'Error: volume type was not set properly at creation time. Current: rw, desired: dp.'
+ assert call_main(my_main, args, module_args, fail=True)['msg'] == error
+
+
+def test_create_volume_attribute():
+ obj = create_module(vol_module, DEFAULT_ARGS)
+ # str
+ obj.parameters['option_name'] = 'my_option'
+ parent = netapp_utils.zapi.NaElement('results')
+ obj.create_volume_attribute(None, parent, 'zapi_name', 'option_name')
+ print(parent.to_string())
+ assert parent['zapi_name'] == 'my_option'
+ # int - fail, unless converted
+ obj.parameters['option_name'] = 123
+ expect_and_capture_ansible_exception(obj.create_volume_attribute, TypeError, None, parent, 'zapi_name', 'option_name')
+ parent = netapp_utils.zapi.NaElement('results')
+ obj.create_volume_attribute(None, parent, 'zapi_name', 'option_name', int)
+ assert parent['zapi_name'] == '123'
+    # bool
+ obj.parameters['option_name'] = False
+ parent = netapp_utils.zapi.NaElement('results')
+ obj.create_volume_attribute(None, parent, 'zapi_name', 'option_name', bool)
+ assert parent['zapi_name'] == 'false'
+ # parent->attrs->attr
+ # create child
+ parent = netapp_utils.zapi.NaElement('results')
+ obj.create_volume_attribute('child', parent, 'zapi_name', 'option_name', bool)
+ assert parent['child']['zapi_name'] == 'false'
+ # use existing child in parent
+ obj.create_volume_attribute('child', parent, 'zapi_name2', 'option_name', bool)
+ assert parent['child']['zapi_name2'] == 'false'
+ # pass child
+ parent = netapp_utils.zapi.NaElement('results')
+ child = netapp_utils.zapi.NaElement('child')
+ obj.create_volume_attribute(child, parent, 'zapi_name', 'option_name', bool)
+ assert parent['child']['zapi_name'] == 'false'
+
+
+def test_check_invoke_result():
+ register_responses([
+ # 3rd run
+ ('ZAPI', 'job-get', ZRR['job_success']),
+        # 4th run
+ ('ZAPI', 'job-get', ZRR['job_failure']),
+ ])
+ module_args = {
+ 'time_out': 0
+ }
+ obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ # 1 - operation failed immediately
+ error = 'Operation failed when testing volume.'
+ assert error in expect_and_capture_ansible_exception(obj.check_invoke_result, 'fail', ZRR['failed_results'][0], 'testing')['msg']
+ # 2 - operation in progress - exit immediately as time_out is 0
+ assert obj.check_invoke_result(ZRR['async_results'][0], 'testing') is None
+ module_args = {
+ 'time_out': 10
+ }
+ # 3 - operation in progress - job reported success
+ obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ assert obj.check_invoke_result(ZRR['async_results'][0], 'testing') is None
+ # 4 - operation in progress - job reported a failure
+ obj = create_module(vol_module, DEFAULT_ARGS, module_args)
+ error = 'Error when testing volume: failure'
+ assert error in expect_and_capture_ansible_exception(obj.check_invoke_result, 'fail', ZRR['async_results'][0], 'testing')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_autosize.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_autosize.py
new file mode 100644
index 000000000..662d95bfe
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_autosize.py
@@ -0,0 +1,367 @@
+# (c) 2019, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_volume_autosize '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import call_main, patch_ansible, create_module, create_and_apply
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume_autosize \
+ import NetAppOntapVolumeAutosize as autosize_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_uuid': (200, {'records': [{'uuid': 'testuuid'}]}, None),
+ 'get_autosize': (200,
+ {'uuid': 'testuuid',
+ 'name': 'testname',
+ 'autosize': {"maximum": 10737418240,
+ "minimum": 22020096,
+ "grow_threshold": 99,
+ "shrink_threshold": 40,
+ "mode": "grow"
+ }
+ }, None),
+ 'get_autosize_empty': (200, {
+ 'uuid': 'testuuid',
+ 'name': 'testname',
+ 'autosize': {}
+ }, None)
+})
+
+
+MOCK_AUTOSIZE = {
+ 'grow_threshold_percent': 99,
+ 'maximum_size': '10g',
+ 'minimum_size': '21m',
+ 'increment_size': '10m',
+ 'mode': 'grow',
+ 'shrink_threshold_percent': 40,
+ 'vserver': 'test_vserver',
+ 'volume': 'test_volume'
+}
+
+
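+# the canned ZAPI info mirrors MOCK_AUTOSIZE with sizes expressed in KB
+# (10g -> 10485760, 21m -> 21504, 10m -> 10240)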
+autosize_info = {
+ 'grow-threshold-percent': MOCK_AUTOSIZE['grow_threshold_percent'],
+ 'maximum-size': '10485760',
+ 'minimum-size': '21504',
+ 'increment-size': '10240',
+ 'mode': MOCK_AUTOSIZE['mode'],
+ 'shrink-threshold-percent': MOCK_AUTOSIZE['shrink_threshold_percent']
+}
+
+
+ZRR = zapi_responses({
+ 'get_autosize': build_zapi_response(autosize_info)
+})
+
+
+DEFAULT_ARGS = {
+ 'vserver': MOCK_AUTOSIZE['vserver'],
+ 'volume': MOCK_AUTOSIZE['volume'],
+ 'grow_threshold_percent': MOCK_AUTOSIZE['grow_threshold_percent'],
+ 'maximum_size': MOCK_AUTOSIZE['maximum_size'],
+ 'minimum_size': MOCK_AUTOSIZE['minimum_size'],
+ 'mode': MOCK_AUTOSIZE['mode'],
+ 'shrink_threshold_percent': MOCK_AUTOSIZE['shrink_threshold_percent'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ args = dict(DEFAULT_ARGS)
+ args.pop('vserver')
+ error = 'missing required arguments: vserver'
+ assert create_module(autosize_module, args, fail=True)['msg'] == error
+
+
+def test_idempotent_modify():
+ register_responses([
+ ('ZAPI', 'volume-autosize-get', ZRR['get_autosize']),
+ ])
+ module_args = {
+ 'use_rest': 'never'
+ }
+ assert not create_and_apply(autosize_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_modify():
+ register_responses([
+ ('ZAPI', 'volume-autosize-get', ZRR['get_autosize']),
+ ('ZAPI', 'volume-autosize-set', ZRR['success']),
+ ])
+ module_args = {
+ 'increment_size': MOCK_AUTOSIZE['increment_size'],
+ 'maximum_size': '11g',
+ 'use_rest': 'never'
+ }
+ assert create_and_apply(autosize_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_zapi__create_get_volume_return_no_data():
+ module_args = {
+ 'use_rest': 'never'
+ }
+ my_obj = create_module(autosize_module, DEFAULT_ARGS, module_args)
+ assert my_obj._create_get_volume_return(build_zapi_response({'unsupported_key': 'value'})[0]) is None
+
+
+def test_error_get():
+ register_responses([
+ ('ZAPI', 'volume-autosize-get', ZRR['error']),
+ ])
+ module_args = {
+ 'use_rest': 'never'
+ }
+ error = 'Error fetching volume autosize info for test_volume: NetApp API failed. Reason - 12345:synthetic error for UT purpose.'
+ assert create_and_apply(autosize_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_error_modify():
+ register_responses([
+ ('ZAPI', 'volume-autosize-get', ZRR['get_autosize']),
+ ('ZAPI', 'volume-autosize-set', ZRR['error']),
+ ])
+ module_args = {
+ 'increment_size': MOCK_AUTOSIZE['increment_size'],
+ 'maximum_size': '11g',
+ 'use_rest': 'never'
+ }
+ error = 'Error modifying volume autosize for test_volume: NetApp API failed. Reason - 12345:synthetic error for UT purpose.'
+ assert create_and_apply(autosize_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_successful_reset():
+ register_responses([
+ ('ZAPI', 'volume-autosize-get', ZRR['get_autosize']),
+ ('ZAPI', 'volume-autosize-set', ZRR['success']),
+ ])
+ args = dict(DEFAULT_ARGS)
+ for arg in ('maximum_size', 'minimum_size', 'grow_threshold_percent', 'shrink_threshold_percent', 'mode'):
+        # remove args that are exclusive with reset
+ args.pop(arg)
+ module_args = {
+ 'reset': True,
+ 'use_rest': 'never'
+ }
+ assert create_and_apply(autosize_module, args, module_args)['changed']
+
+
+def test_rest_error_volume_not_found():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['zero_records']),
+ ])
+ error = 'Error fetching volume autosize info for test_volume: volume not found for vserver test_vserver.'
+ assert create_and_apply(autosize_module, DEFAULT_ARGS, fail=True)['msg'] == error
+
+
+def test_rest_error_get():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ])
+ module_args = {
+ 'maximum_size': '11g'
+ }
+ error = 'Error fetching volume autosize info for test_volume: calling: storage/volumes: got Expected error.'
+ assert create_and_apply(autosize_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_rest_error_patch():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize']),
+ ('PATCH', 'storage/volumes/testuuid', SRR['generic_error']),
+ ])
+ module_args = {
+ 'maximum_size': '11g'
+ }
+ error = 'Error modifying volume autosize for test_volume: calling: storage/volumes/testuuid: got Expected error.'
+ assert create_and_apply(autosize_module, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_rest_successful_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize']),
+ ('PATCH', 'storage/volumes/testuuid', SRR['success']),
+ ])
+ module_args = {
+ 'maximum_size': '11g'
+ }
+ assert create_and_apply(autosize_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_rest_idempotent_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize']),
+ ])
+ assert not create_and_apply(autosize_module, DEFAULT_ARGS)['changed']
+
+
+def test_rest_idempotent_modify_no_attributes():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize_empty']),
+ ])
+ module_args = {
+ 'maximum_size': '11g'
+ }
+ assert not create_and_apply(autosize_module, DEFAULT_ARGS)['changed']
+
+
+def test_rest__create_get_volume_return_no_data():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(autosize_module, DEFAULT_ARGS)
+ assert my_obj._create_get_volume_return({'unsupported_key': 'value'}) == {'uuid': None}
+
+
+def test_rest_modify_no_data():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(autosize_module, DEFAULT_ARGS)
+ # remove all attributes
+ for arg in ('maximum_size', 'minimum_size', 'grow_threshold_percent', 'shrink_threshold_percent', 'mode'):
+ my_obj.parameters.pop(arg)
+ assert my_obj.modify_volume_autosize('uuid') is None
+
+
+def test_rest_convert_to_bytes():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(autosize_module, DEFAULT_ARGS)
+
+ module_args = {
+ 'minimum_size': '11k'
+ }
+ assert my_obj.convert_to_byte('minimum_size', module_args) == 11 * 1024
+
+ module_args = {
+ 'minimum_size': '11g'
+ }
+ assert my_obj.convert_to_byte('minimum_size', module_args) == 11 * 1024 * 1024 * 1024
+
+
+def test_rest_convert_to_kb():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ my_obj = create_module(autosize_module, DEFAULT_ARGS)
+
+ module_args = {
+ 'minimum_size': '11k'
+ }
+ assert my_obj.convert_to_kb('minimum_size', module_args) == 11
+
+ module_args = {
+ 'minimum_size': '11g'
+ }
+ assert my_obj.convert_to_kb('minimum_size', module_args) == 11 * 1024 * 1024
+
+
+def test_rest_invalid_values():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize'])
+ ])
+ module_args = {
+ 'minimum_size': '11kb'
+ }
+ error = 'minimum_size must end with a k, m, g or t, found b in 11kb.'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+ module_args = {
+ 'minimum_size': '11kk'
+ }
+ error = 'minimum_size must start with a number, found 11k in 11kk.'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+ module_args = {
+ 'minimum_size': ''
+ }
+ error = "minimum_size must start with a number, and must end with a k, m, g or t, found ''."
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+ module_args = {
+ 'minimum_size': 10
+ }
+ error = 'minimum_size must end with a k, m, g or t, found 0 in 10.'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_rest_unsupported_parameters():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_autosize'])
+ ])
+ module_args = {
+ 'increment_size': '11k'
+ }
+ error = 'Rest API does not support increment size, please switch to ZAPI'
+ assert call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg'] == error
+
+ # reset is not supported - when set to True
+ module_args = {
+ 'reset': True
+ }
+ args = dict(DEFAULT_ARGS)
+ for arg in ('maximum_size', 'minimum_size', 'grow_threshold_percent', 'shrink_threshold_percent', 'mode'):
+        # remove args that are exclusive with reset
+ args.pop(arg)
+ error = 'Rest API does not support reset, please switch to ZAPI'
+ assert call_main(my_main, args, module_args, fail=True)['msg'] == error
+
+ # reset is ignored when False
+ module_args = {
+ 'reset': False
+ }
+ assert not call_main(my_main, args, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_missing_netapp_lib(mock_has_netapp_lib):
+ module_args = {
+ 'use_rest': 'never',
+ }
+ mock_has_netapp_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg == create_module(autosize_module, DEFAULT_ARGS, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_clone.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_clone.py
new file mode 100644
index 000000000..f68401348
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_clone.py
@@ -0,0 +1,210 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests ONTAP Ansible module: na_ontap_volume_clone'''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ call_main, create_and_apply, create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import zapi_responses, build_zapi_response, build_zapi_error
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume_clone \
+ import NetAppONTAPVolumeClone as my_module, main as my_main
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+clone_info = {
+ 'attributes': {
+ 'volume-clone-info': {
+ 'volume': 'ansible',
+ 'parent-volume': 'ansible'}}}
+
+clone_info_split_in_progress = {
+ 'attributes': {
+ 'volume-clone-info': {
+ 'volume': 'ansible',
+ 'parent-volume': 'ansible',
+ 'block-percentage-complete': 20,
+ 'blocks-scanned': 56676,
+ 'blocks-updated': 54588}}}
+
+ZRR = zapi_responses({
+ 'clone_info': build_zapi_response(clone_info, 1),
+ 'clone_info_split_in_progress': build_zapi_response(clone_info_split_in_progress, 1),
+ 'error_no_clone': build_zapi_error(15661, 'flexclone not found.')
+})
+
+DEFAULT_ARGS = {
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'ansible',
+ 'volume': 'ansible',
+ 'parent_volume': 'ansible',
+ 'split': None,
+ 'use_rest': 'never'
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' test required arguments are reported as errors '''
+ msg = create_module(my_module, fail=True)['msg']
+ print('Info: %s' % msg)
+
+
+def test_ensure_get_called():
+ ''' test get_volume_clone() for non-existent volume clone'''
+ register_responses([
+ ('volume-clone-get', ZRR['empty'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ assert my_obj.get_volume_clone() is None
+
+
+def test_ensure_get_called_existing():
+ ''' test get_volume_clone() for existing volume clone'''
+ register_responses([
+ ('volume-clone-get', ZRR['clone_info'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ current = {'split': False}
+ assert my_obj.get_volume_clone() == current
+
+
+def test_ensure_get_called_no_clone_error():
+    ''' test get_volume_clone() when the clone does not exist (ZAPI error 15661)'''
+ register_responses([
+ ('volume-clone-get', ZRR['error_no_clone'])
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ current = {'split': False}
+ assert my_obj.get_volume_clone() is None
+
+
+def test_successful_create():
+ ''' test creating volume_clone without split and testing idempotency '''
+ register_responses([
+ ('volume-clone-get', ZRR['empty']),
+ ('volume-clone-create', ZRR['success']),
+ ('volume-clone-get', ZRR['clone_info']),
+ ])
+ module_args = {
+ 'parent_snapshot': 'abc',
+ 'volume_type': 'dp',
+ 'qos_policy_group_name': 'abc',
+ 'junction_path': 'abc',
+ 'uid': '1',
+ 'gid': '1'
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_create_with_split():
+ ''' test creating volume_clone with split and testing idempotency '''
+ register_responses([
+ # first test, create and split
+ ('volume-clone-get', ZRR['empty']),
+ ('volume-clone-create', ZRR['success']),
+ ('volume-clone-split-start', ZRR['success']),
+ # second test, clone already exists but is not split
+ ('volume-clone-get', ZRR['clone_info']),
+ ('volume-clone-split-start', ZRR['success']),
+ # third test, clone already exists, split already in progress
+ ('volume-clone-get', ZRR['clone_info_split_in_progress']),
+ ])
+ module_args = {
+ 'parent_snapshot': 'abc',
+ 'volume_type': 'dp',
+ 'qos_policy_group_name': 'abc',
+ 'junction_path': 'abc',
+ 'uid': '1',
+ 'gid': '1',
+ 'split': True
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successful_create_with_parent_vserver():
+    ''' test creating volume_clone with parent_vserver and split, and testing idempotency '''
+ register_responses([
+ # first test, create and split
+ ('volume-clone-get', ZRR['empty']),
+ ('volume-clone-create', ZRR['success']),
+ ('volume-clone-split-start', ZRR['success']),
+ # second test, clone already exists but is not split
+ ('volume-clone-get', ZRR['clone_info']),
+ ('volume-clone-split-start', ZRR['success']),
+ # third test, clone already exists, split already in progress
+ ('volume-clone-get', ZRR['clone_info_split_in_progress']),
+ ])
+ module_args = {
+ 'parent_snapshot': 'abc',
+ 'parent_vserver': 'abc',
+ 'volume_type': 'dp',
+ 'qos_policy_group_name': 'abc',
+ 'space_reserve': 'volume',
+ 'split': True
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_vserver_cluster_options_give_error():
+ module_args = {
+ 'parent_snapshot': 'abc',
+ 'parent_vserver': 'abc',
+ 'volume_type': 'dp',
+ 'qos_policy_group_name': 'abc',
+ 'junction_path': 'abc',
+ 'uid': '1',
+ 'gid': '1'
+ }
+ msg = create_module(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert "parameters are mutually exclusive: " in msg
+ print('Info: %s' % msg)
+
+
+def test_if_all_methods_catch_exception():
+ ''' test if all methods catch exception '''
+ register_responses([
+ ('volume-clone-get', ZRR['error']),
+ ('volume-clone-create', ZRR['error']),
+ ('volume-clone-split-start', ZRR['error']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ msg = expect_and_capture_ansible_exception(my_obj.get_volume_clone, 'fail')['msg']
+ assert 'Error fetching volume clone information ' in msg
+ msg = expect_and_capture_ansible_exception(my_obj.create_volume_clone, 'fail')['msg']
+ assert 'Error creating volume clone: ' in msg
+ msg = expect_and_capture_ansible_exception(my_obj.start_volume_clone_split, 'fail')['msg']
+ assert 'Error starting volume clone split: ' in msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_missing_netapp_lib(mock_has_netapp_lib):
+ ''' test error when netapp_lib is missing '''
+ mock_has_netapp_lib.return_value = False
+ msg = create_module(my_module, DEFAULT_ARGS, fail=True)['msg']
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' == msg
+
+
+def test_main():
+ ''' validate call to main() '''
+ register_responses([
+ ('volume-clone-get', ZRR['empty']),
+ ('volume-clone-create', ZRR['success']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_clone_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_clone_rest.py
new file mode 100644
index 000000000..ba0767d42
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_clone_rest.py
@@ -0,0 +1,244 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import copy
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume_clone \
+ import NetAppONTAPVolumeClone as my_module # module under test
+
+# needed for get and modify/delete as they still use ZAPI
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
+
+clone_info = {
+ "clone": {
+ "is_flexclone": True,
+ "parent_snapshot": {
+ "name": "clone_ansibleVolume12_.2022-01-25_211704.0"
+ },
+ "parent_svm": {
+ "name": "ansibleSVM"
+ },
+ "parent_volume": {
+ "name": "ansibleVolume12"
+ }
+ },
+ "name": "ansibleVolume12_clone",
+ "nas": {
+ "gid": 0,
+ "uid": 0
+ },
+ "svm": {
+ "name": "ansibleSVM"
+ },
+ "uuid": "2458688d-7e24-11ec-a267-005056b30cfa"
+}
+
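+# a shallow copy is enough to drop the top-level uuid key; a deep copy is needed before
+# mutating the nested 'clone' dictionary so the original canned record is left untouched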
+clone_info_no_uuid = dict(clone_info)
+clone_info_no_uuid.pop('uuid')
+clone_info_not_a_clone = copy.deepcopy(clone_info)
+clone_info_not_a_clone['clone']['is_flexclone'] = False
+
+SRR = rest_responses({
+ 'volume_clone': (
+ 200,
+ {'records': [
+ clone_info,
+ ]}, None
+ ),
+ 'volume_clone_no_uuid': (
+ 200,
+ {'records': [
+ clone_info_no_uuid,
+ ]}, None
+ ),
+ 'volume_clone_not_a_clone': (
+ 200,
+ {'records': [
+ clone_info_not_a_clone,
+ ]}, None
+ ),
+ 'two_records': (
+ 200,
+ {'records': [
+ clone_info,
+ clone_info_no_uuid,
+ ]}, None
+ )
+})
+
+
+DEFAULT_ARGS = {
+ 'vserver': 'ansibleSVM',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'name': 'clone_of_parent_volume',
+ 'parent_volume': 'parent_volume'
+}
+
+
+def test_successfully_create_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('POST', 'storage/volumes', SRR['volume_clone']),
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {})['changed']
+
+
+def test_error_getting_volume_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error getting volume clone clone_of_parent_volume: calling: storage/volumes: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_volume_clone_rest, 'fail')['msg']
+
+
+def test_error_creating_volume_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'storage/volumes', SRR['generic_error']),
+ ])
+ my_module_object = create_module(my_module, DEFAULT_ARGS)
+ msg = 'Error creating volume clone clone_of_parent_volume: calling: storage/volumes: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.create_volume_clone_rest, 'fail')['msg']
+
+
+def test_error_space_reserve_volume_clone():
+ error = create_module(my_module, fail=True)['msg']
+ print('Info: %s' % error)
+ assert 'missing required arguments:' in error
+ assert 'name' in error
+
+
+def test_successfully_create_with_optional_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('POST', 'storage/volumes', SRR['volume_clone']),
+ ])
+ module_args = {
+ 'qos_policy_group_name': 'test_policy_name',
+ 'parent_snapshot': 'test_snapshot',
+ 'volume_type': 'rw',
+ 'junction_path': '/test_junction_path',
+ 'uid': 10,
+ 'gid': 20,
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_create_with_parent_vserver_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('POST', 'storage/volumes', SRR['volume_clone']),
+ ])
+ module_args = {
+ 'qos_policy_group_name': 'test_policy_name',
+ 'parent_snapshot': 'test_snapshot',
+ 'volume_type': 'rw',
+ 'parent_vserver': 'test_vserver',
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_create_and_split_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('POST', 'storage/volumes', SRR['volume_clone']),
+ ('PATCH', 'storage/volumes/2458688d-7e24-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {'split': True}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_create_no_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('POST', 'storage/volumes', SRR['empty_records']),
+ ])
+ module_args = {'split': True}
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg == 'Error starting volume clone split clone_of_parent_volume: clone UUID is not set'
+
+
+def test_negative_create_no_uuid_in_response():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('POST', 'storage/volumes', SRR['volume_clone_no_uuid']),
+ ])
+ module_args = {'split': True}
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg.startswith('Error: failed to parse create clone response: uuid key not present in')
+
+
+def test_negative_create_bad_response():
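+    # two records in the POST response cannot be mapped to a single clone uuid,
+    # so the module reports an unexpected response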
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('POST', 'storage/volumes', SRR['two_records']),
+ ])
+ module_args = {'split': True}
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg.startswith('Error: failed to parse create clone response: calling: storage/volumes: unexpected response ')
+
+
+def test_successfully_split_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['volume_clone']),
+ ('PATCH', 'storage/volumes/2458688d-7e24-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {'split': True}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_split_volume_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('PATCH', 'storage/volumes/2458688d-7e24-11ec-a267-005056b30cfa', SRR['generic_error']),
+ ])
+ my_obj = create_module(my_module, DEFAULT_ARGS)
+ my_obj.uuid = '2458688d-7e24-11ec-a267-005056b30cfa'
+ my_obj.parameters['split'] = True
+ msg = "Error starting volume clone split clone_of_parent_volume: calling: storage/volumes/2458688d-7e24-11ec-a267-005056b30cfa: got Expected error."
+ assert msg == expect_and_capture_ansible_exception(my_obj.start_volume_clone_split_rest, 'fail')['msg']
+
+
+def test_volume_not_a_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['volume_clone_not_a_clone']),
+ ])
+ module_args = {'split': True}
+ assert not create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_volume_not_a_clone():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['volume_clone_not_a_clone']),
+ ])
+ module_args = {'split': False}
+ msg = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert msg == 'Error: a volume clone_of_parent_volume which is not a FlexClone already exists, and split not requested.'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_efficiency.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_efficiency.py
new file mode 100644
index 000000000..104cc8e51
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_efficiency.py
@@ -0,0 +1,346 @@
+''' unit tests ONTAP Ansible module: na_ontap_volume_efficiency '''
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume_efficiency \
+ import NetAppOntapVolumeEfficiency as volume_efficiency_module, main # module under test
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+DEFAULT_ARGS = {
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vs1',
+ 'path': '/vol/volTest',
+ 'policy': 'auto',
+ 'use_rest': 'never',
+ 'enable_compression': True,
+ 'enable_inline_compression': True,
+ 'enable_cross_volume_inline_dedupe': True,
+ 'enable_inline_dedupe': True,
+ 'enable_data_compaction': True,
+ 'enable_cross_volume_background_dedupe': True
+}
+
+DEFAULT_ARGS_REST = {
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vs1',
+ 'path': '/vol/volTest',
+ 'policy': 'auto',
+ 'use_rest': 'always'
+}
+
+
+def return_vol_info(state='enabled', status='idle', policy='auto'):
+ return {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'sis-status-info': {
+ 'path': '/vol/volTest',
+ 'state': state,
+ 'schedule': None,
+ 'status': status,
+ 'policy': policy,
+ 'is-inline-compression-enabled': 'true',
+ 'is-compression-enabled': 'true',
+ 'is-inline-dedupe-enabled': 'true',
+ 'is-data-compaction-enabled': 'true',
+ 'is-cross-volume-inline-dedupe-enabled': 'true',
+ 'is-cross-volume-background-dedupe-enabled': 'true'
+ }
+ }
+ }
+
+
+ZRR = zapi_responses({
+ 'vol_eff_info': build_zapi_response(return_vol_info()),
+ 'vol_eff_info_disabled': build_zapi_response(return_vol_info(state='disabled')),
+ 'vol_eff_info_running': build_zapi_response(return_vol_info(status='running')),
+ 'vol_eff_info_policy': build_zapi_response(return_vol_info(policy='default'))
+})
+
+
+def return_vol_info_rest(state='enabled', status='idle', policy='auto', compaction='inline'):
+ return {
+ "records": [{
+ "uuid": "25311eff",
+ "name": "test_e",
+ "efficiency": {
+ "compression": "both",
+ "storage_efficiency_mode": "default",
+ "dedupe": "both",
+ "cross_volume_dedupe": "both",
+ "compaction": compaction,
+ "schedule": "-",
+ "volume_path": "/vol/test_e",
+ "state": state,
+ "op_state": status,
+ "type": "regular",
+ "progress": "Idle for 02:06:26",
+ "last_op_begin": "Mon Jan 02 00:10:00 2023",
+ "last_op_end": "Mon Jan 02 00:10:00 2023",
+ "last_op_size": 0,
+ "last_op_state": "Success",
+ "policy": {"name": policy}
+ }
+ }],
+ "num_records": 1
+ }
+
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'volume_efficiency_info': (200, return_vol_info_rest(), None),
+ 'volume_efficiency_status_running': (200, return_vol_info_rest(status='active'), None),
+ 'volume_efficiency_disabled': (200, return_vol_info_rest(state='disabled'), None),
+ 'volume_efficiency_modify': (200, return_vol_info_rest(compaction='none'), None),
+ "unauthorized": (403, None, {'code': 6, 'message': 'Unexpected argument "storage_efficiency_mode".'}),
+ "unexpected_arg": (403, None, {'code': 6, 'message': "not authorized for that command"})
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "vserver"]
+ error = create_module(volume_efficiency_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['path']
+ assert 'one of the following is required: path, volume_name' in create_module(volume_efficiency_module, DEFAULT_ARGS_COPY, fail=True)['msg']
+
+
+def test_ensure_get_called_existing():
+ ''' test get_volume_efficiency for existing config '''
+ register_responses([
+ ('sis-get-iter', ZRR['vol_eff_info'])
+ ])
+ my_obj = create_module(volume_efficiency_module, DEFAULT_ARGS)
+ assert my_obj.get_volume_efficiency()
+
+
+def test_successful_enable():
+ ''' enable volume_efficiency and test idempotency '''
+ register_responses([
+ ('sis-get-iter', ZRR['vol_eff_info_disabled']),
+ ('sis-enable', ZRR['success']),
+ ('sis-get-iter', ZRR['vol_eff_info']),
+ # idempotency check
+ ('sis-get-iter', ZRR['vol_eff_info']),
+
+ ])
+ DEFAULT_ARGS_COPY = DEFAULT_ARGS.copy()
+ del DEFAULT_ARGS_COPY['path']
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_COPY, {'volume_name': 'volTest'})['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS)['changed']
+
+
+def test_successful_disable():
+ ''' disable volume_efficiency and test idempotency '''
+ register_responses([
+ ('sis-get-iter', ZRR['vol_eff_info']),
+ ('sis-disable', ZRR['success']),
+ # idempotency check
+ ('sis-get-iter', ZRR['vol_eff_info_disabled']),
+
+ ])
+ args = {
+ 'state': 'absent',
+ 'use_rest': 'never'
+ }
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, args)['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_successful_modify():
+ ''' modify volume_efficiency config and test idempotency '''
+ register_responses([
+ ('sis-get-iter', ZRR['vol_eff_info']),
+ ('sis-set-config', ZRR['success']),
+ # idempotency check
+ ('sis-get-iter', ZRR['vol_eff_info_policy']),
+
+ ])
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS, {'policy': 'default'})['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS, {'policy': 'default'})['changed']
+
+
+def test_successful_start():
+ ''' start volume_efficiency and test idempotency '''
+ register_responses([
+ ('sis-get-iter', ZRR['vol_eff_info']),
+ ('sis-start', ZRR['success']),
+ # idempotency check
+ ('sis-get-iter', ZRR['vol_eff_info_running']),
+
+ ])
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS, {'volume_efficiency': 'start'})['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS, {'volume_efficiency': 'start'})['changed']
+
+
+def test_successful_stop():
+ ''' stop volume_efficiency and test idempotency '''
+ register_responses([
+ ('sis-get-iter', ZRR['vol_eff_info_running']),
+ ('sis-stop', ZRR['success']),
+ # idempotency check
+ ('sis-get-iter', ZRR['vol_eff_info']),
+
+ ])
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS, {'volume_efficiency': 'stop'})['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS, {'volume_efficiency': 'stop'})['changed']
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('sis-get-iter', ZRR['error']),
+ ('sis-set-config', ZRR['error']),
+ ('sis-start', ZRR['error']),
+ ('sis-stop', ZRR['error']),
+ ('sis-enable', ZRR['error']),
+ ('sis-disable', ZRR['error']),
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('PATCH', 'storage/volumes', SRR['generic_error']),
+ ('PATCH', 'storage/volumes', SRR['unexpected_arg']),
+ ('PATCH', 'storage/volumes', SRR['unauthorized'])
+ ])
+ vol_eff_obj = create_module(volume_efficiency_module, DEFAULT_ARGS)
+ assert 'Error getting volume efficiency' in expect_and_capture_ansible_exception(vol_eff_obj.get_volume_efficiency, 'fail')['msg']
+ assert 'Error modifying storage efficiency' in expect_and_capture_ansible_exception(vol_eff_obj.modify_volume_efficiency, 'fail', {})['msg']
+ assert 'Error starting storage efficiency' in expect_and_capture_ansible_exception(vol_eff_obj.start_volume_efficiency, 'fail')['msg']
+ assert 'Error stopping storage efficiency' in expect_and_capture_ansible_exception(vol_eff_obj.stop_volume_efficiency, 'fail')['msg']
+ assert 'Error enabling storage efficiency' in expect_and_capture_ansible_exception(vol_eff_obj.enable_volume_efficiency, 'fail')['msg']
+ assert 'Error disabling storage efficiency' in expect_and_capture_ansible_exception(vol_eff_obj.disable_volume_efficiency, 'fail')['msg']
+
+ args = {'state': 'absent', 'enable_compression': True}
+ modify = {'enabled': 'disabled'}
+ vol_eff_obj = create_module(volume_efficiency_module, DEFAULT_ARGS_REST, args)
+ assert 'Error getting volume efficiency' in expect_and_capture_ansible_exception(vol_eff_obj.get_volume_efficiency, 'fail')['msg']
+ assert 'Error in volume/efficiency patch' in expect_and_capture_ansible_exception(vol_eff_obj.modify_volume_efficiency, 'fail', {'arg': 1})['msg']
+ assert 'cannot modify storage_efficiency' in expect_and_capture_ansible_exception(vol_eff_obj.modify_volume_efficiency, 'fail', {'arg': 1})['msg']
+ assert 'user is not authorized' in expect_and_capture_ansible_exception(vol_eff_obj.modify_volume_efficiency, 'fail', {'arg': 1})['msg']
+ # Error: cannot set compression keys: ['enable_compression']
+ assert 'when volume efficiency already disabled' in expect_and_capture_ansible_exception(vol_eff_obj.validate_efficiency_compression, 'fail', {})['msg']
+ assert 'when trying to disable volume' in expect_and_capture_ansible_exception(vol_eff_obj.validate_efficiency_compression, 'fail', modify)['msg']
+
+
+def test_successful_enable_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_disabled']),
+ ('PATCH', 'storage/volumes/25311eff', SRR['success']),
+ # idempotency check
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_info']),
+ ])
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, {'use_rest': 'always'})['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, {'use_rest': 'always'})['changed']
+
+
+def test_successful_disable_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_info']),
+ ('PATCH', 'storage/volumes/25311eff', SRR['success']),
+ # idempotency check
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_disabled']),
+ ])
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, {'state': 'absent'})['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, {'state': 'absent'})['changed']
+
+
+def test_successful_modify_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_info']),
+ ('PATCH', 'storage/volumes/25311eff', SRR['success']),
+ # idempotency check
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_modify']),
+ ])
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, {'enable_data_compaction': False})['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, {'enable_data_compaction': False})['changed']
+
+
+def test_successful_enable_vol_efficiency_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_disabled']),
+ ('PATCH', 'storage/volumes/25311eff', SRR['success']),
+ # idempotency check
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_info']),
+ ])
+ DEFAULT_ARGS_REST_COPY = DEFAULT_ARGS_REST.copy()
+ del DEFAULT_ARGS_REST_COPY['path']
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST_COPY, {'volume_name': 'vol1'})['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST)['changed']
+
+
+def test_successful_start_rest_all_options():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_info']),
+ ('PATCH', 'storage/volumes/25311eff', SRR['success']),
+ # idempotency check
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_status_running']),
+ ])
+ args = {
+ 'volume_efficiency': 'start',
+ 'start_ve_scan_old_data': True
+ }
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, args)['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_successful_stop_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_status_running']),
+ ('PATCH', 'storage/volumes/25311eff', SRR['success']),
+ # idempotency check
+ ('GET', 'cluster', SRR['is_rest_9_11_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_info']),
+ ])
+ args = {'volume_efficiency': 'stop'}
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, args)['changed']
+ assert not create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_negative_modify_rest_se_mode_no_version():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ error = 'Error: Minimum version of ONTAP for storage_efficiency_mode is (9, 10, 1)'
+ assert error in create_module(volume_efficiency_module, DEFAULT_ARGS_REST, {'storage_efficiency_mode': 'default'}, fail=True)['msg']
+ error = 'Error: cannot set storage_efficiency_mode in ZAPI'
+ assert error in create_module(volume_efficiency_module, DEFAULT_ARGS, {'storage_efficiency_mode': 'default'}, fail=True)['msg']
+
+
+def test_modify_rest_se_mode():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['volume_efficiency_info']),
+ ('PATCH', 'storage/volumes/25311eff', SRR['success'])
+ ])
+ assert create_and_apply(volume_efficiency_module, DEFAULT_ARGS_REST, {'storage_efficiency_mode': 'efficient'})['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_rest.py
new file mode 100644
index 000000000..47525beec
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_rest.py
@@ -0,0 +1,1440 @@
+# (c) 2020-2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_volume '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import copy
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ assert_no_warnings, assert_warning_was_raised, print_warnings, call_main, create_and_apply,\
+ create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume \
+ import NetAppOntapVolume as volume_module, main as my_main # module under test
+
+# needed for get and modify/delete as they still use ZAPI
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+volume_info = {
+ "uuid": "7882901a-1aef-11ec-a267-005056b30cfa",
+ "comment": "carchi8py",
+ "name": "test_svm",
+ "state": "online",
+ "style": "flexvol",
+ "tiering": {
+ "policy": "backup",
+ "min_cooling_days": 0
+ },
+ "type": "rw",
+ "aggregates": [
+ {
+ "name": "aggr1",
+ "uuid": "aggr1_uuid"
+ }
+ ],
+ "encryption": {
+ "enabled": True
+ },
+ "efficiency": {
+ "compression": "none",
+ "policy": {
+ "name": "-"
+ }
+ },
+ "files": {
+ "maximum": 2000
+ },
+ "nas": {
+ "gid": 0,
+ "security_style": "unix",
+ "uid": 0,
+ "unix_permissions": 654,
+ "path": '/this/path',
+ "export_policy": {
+ "name": "default"
+ }
+ },
+ "snapshot_policy": {
+ "name": "default",
+ "uuid": "0a42a3d9-0c29-11ec-a267-005056b30cfa"
+ },
+ "space": {
+ "logical_space": {
+ "enforcement": False,
+ "reporting": False,
+ },
+ "size": 10737418240,
+ "snapshot": {
+ "reserve_percent": 5
+ }
+ },
+ "guarantee": {
+ "type": "volume"
+ },
+ "snaplock": {
+ "type": "non_snaplock"
+ },
+ "analytics": {
+ "state": "on"
+ }
+}
+
+volume_info_mount = copy.deepcopy(volume_info)
+volume_info_mount['nas']['path'] = ''
+del volume_info_mount['nas']['path']
+volume_info_encrypt_off = copy.deepcopy(volume_info)
+volume_info_encrypt_off['encryption']['enabled'] = False
+volume_info_sl_enterprise = copy.deepcopy(volume_info)
+volume_info_sl_enterprise['snaplock']['type'] = 'enterprise'
+volume_info_sl_enterprise['snaplock']['retention'] = {'default': 'P30Y'}
+volume_analytics_disabled = copy.deepcopy(volume_info)
+volume_analytics_disabled['analytics']['state'] = 'off'
+volume_analytics_initializing = copy.deepcopy(volume_info)
+volume_analytics_initializing['analytics']['state'] = 'initializing'
+volume_info_offline = copy.deepcopy(volume_info)
+volume_info_offline['state'] = 'offline'
+volume_info_tags = copy.deepcopy(volume_info)
+volume_info_tags['_tags'] = ["team:csi", "environment:test"]
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_rest_96': (200, dict(version=dict(generation=9, major=6, minor=0, full='dummy_9_6_0')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'no_record': (200, {'num_records': 0, 'records': []}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ # Volume
+ 'get_volume': (200, {'records': [volume_info]}, None),
+ 'get_volume_sl_enterprise': (200, {'records': [volume_info_sl_enterprise]}, None),
+ 'get_volume_mount': (200, {'records': [volume_info_mount]}, None),
+ 'get_volume_encrypt_off': (200, {'records': [volume_info_encrypt_off]}, None),
+ # module specific responses
+ 'nas_app_record': (200,
+ {'records': [{"uuid": "09e9fd5e-8ebd-11e9-b162-005056b39fe7",
+ "name": "test_app",
+ "nas": {
+ "application_components": [{'xxx': 1}],
+ }}]}, None),
+ 'nas_app_record_by_uuid': (200,
+ {"uuid": "09e9fd5e-8ebd-11e9-b162-005056b39fe7",
+ "name": "test_app",
+ "nas": {
+ "application_components": [{'xxx': 1}],
+ "flexcache": {
+ "origin": {'svm': {'name': 'org_name'}}
+ }
+ }}, None),
+ 'get_aggr_one_object_store': (200,
+ {'records': ['one']}, None),
+ 'get_aggr_two_object_stores': (200,
+ {'records': ['two']}, None),
+ 'move_state_replicating': (200, {'movement': {'state': 'replicating'}}, None),
+ 'move_state_success': (200, {'movement': {'state': 'success'}}, None),
+ 'encrypting': (200, {'encryption': {'status': {'message': 'initializing'}}}, None),
+ 'encrypted': (200, {'encryption': {'state': 'encrypted'}}, None),
+ 'analytics_off': (200, {'records': [volume_analytics_disabled]}, None),
+ 'analytics_initializing': (200, {'records': [volume_analytics_initializing]}, None),
+ 'one_svm_record': (200, {'records': [{'uuid': 'svm_uuid'}]}, None),
+ 'volume_info_offline': (200, {'records': [volume_info_offline]}, None),
+ 'volume_info_tags': (200, {'records': [volume_info_tags]}, None)
+})
+
+DEFAULT_APP_ARGS = {
+ 'name': 'test_svm',
+ 'vserver': 'ansibleSVM',
+ 'nas_application_template': dict(
+ tiering=None
+ ),
+ # 'aggregate_name': 'whatever', # not used for create when using REST application/applications
+ 'size': 10,
+ 'size_unit': 'gb',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always'
+}
+
+DEFAULT_VOLUME_ARGS = {
+ 'name': 'test_svm',
+ 'vserver': 'ansibleSVM',
+ 'aggregate_name': 'aggr1',
+ 'size': 10,
+ 'size_unit': 'gb',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always'
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ exc = create_module(volume_module, fail=True)
+ print('Info: %s' % exc['msg'])
+ assert 'missing required arguments:' in exc['msg']
+
+
+def test_fail_if_aggr_is_set():
+ module_args = {'aggregate_name': 'should_fail'}
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ error = 'Conflict: aggregate_name is not supported when application template is enabled. Found: aggregate_name: should_fail'
+ assert create_module(volume_module, DEFAULT_APP_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_missing_size():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # GET volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ])
+ data = dict(DEFAULT_APP_ARGS)
+ data.pop('size')
+ error = 'Error: "size" is required to create nas application.'
+ assert create_and_apply(volume_module, data, fail=True)['msg'] == error
+
+
+def test_mismatched_tiering_policies():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ module_args = {
+ 'tiering_policy': 'none',
+ 'nas_application_template': {'tiering': {'policy': 'auto'}}
+ }
+ error = 'Conflict: if tiering_policy and nas_application_template tiering policy are both set, they must match.'\
+ ' Found "none" and "auto".'
+ assert create_module(volume_module, DEFAULT_APP_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_rest_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # GET volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ('POST', 'application/applications', SRR['generic_error']), # POST application/applications
+ ])
+ error = 'Error in create_nas_application: calling: application/applications: got %s.' % SRR['generic_error'][2]
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, fail=True)['msg'] == error
+
+
+def test_rest_successfully_created():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ('POST', 'application/applications', SRR['empty_good']), # POST application/applications
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ])
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS)['changed']
+
+
+def test_rest_create_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+
+ ])
+ assert not create_and_apply(volume_module, DEFAULT_APP_ARGS)['changed']
+
+
+def test_rest_successfully_created_with_modify():
+ ''' since language is not supported in application, the module is expected to:
+ 1. create the volume using application REST API
+ 2. immediately modify the volume to update options which are not available in the nas template.
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['empty_good']), # set unix_permissions
+ ])
+ module_args = {
+ 'language': 'fr',
+ 'unix_permissions': '---rw-r-xr-x'
+ }
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_resized():
+ ''' make sure resize uses the REST API when sizing_method is present
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['empty_good']), # PATCH storage/volumes
+ ])
+ module_args = {
+ 'sizing_method': 'add_new_resources',
+ 'size': 20737418240
+ }
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+
+
+def test_rest_volume_create_modify_tags():
+ ''' volume create, modify with tags
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_13_1']),
+ ('GET', 'storage/volumes', SRR['no_record']),
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('POST', 'storage/volumes', SRR['success']),
+ ('GET', 'storage/volumes', SRR['volume_info_tags']),
+ # idempotent check
+ ('GET', 'cluster', SRR['is_rest_9_13_1']),
+ ('GET', 'storage/volumes', SRR['volume_info_tags']),
+ # modify tags
+ ('GET', 'cluster', SRR['is_rest_9_13_1']),
+ ('GET', 'storage/volumes', SRR['volume_info_tags']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['success']),
+ ])
+ module_args = {'tags': ["team:csi", "environment:test"]}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+ assert not create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+ module_args = {'tags': ["team:csi"]}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_deleted():
+ ''' delete volume using REST - no app
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['empty_good']), # PATCH storage/volumes - unmount
+ ('DELETE', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['empty_good']), # DELETE storage/volumes
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+ assert_no_warnings()
+
+
+def test_rest_successfully_deleted_with_warning():
+ ''' delete volume using REST - no app - unmount failed
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # PATCH storage/volumes - unmount
+ ('DELETE', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['empty_good']), # DELETE storage/volumes
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+ print_warnings()
+ assert_warning_was_raised('Volume was successfully deleted though unmount failed with: calling: '
+ 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error.')
+
+
+def test_rest_successfully_deleted_with_app():
+ ''' delete app
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['empty_good']), # PATCH storage/volumes - unmount
+ ('DELETE', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['empty_good']), # DELETE storage/volumes
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_move_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Move volume
+ ])
+ module_args = {'aggregate_name': 'aggr2'}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_move_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Move volume
+ ])
+ module_args = {'aggregate_name': 'aggr2'}
+ msg = "Error moving volume test_svm: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_error_rehost_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['zero_records']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ])
+ module_args = {'from_vserver': 'svm_orig'}
+ msg = "Error: ONTAP REST API does not support Rehosting Volumes"
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_volume_unmount_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Unmount Volume
+ ])
+ module_args = {'junction_path': ''}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_volume_unmount_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Unmount Volume
+ ])
+ module_args = {'junction_path': ''}
+ msg = 'Error unmounting volume test_svm with path "": calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error.'
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_volume_mount_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_mount']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Mount Volume
+ ])
+ module_args = {'junction_path': '/this/path'}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_volume_mount_do_nothing_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_mount']), # Get Volume
+ ])
+ module_args = {'junction_path': ''}
+ assert not create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_volume_mount_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_mount']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Mount Volume
+ ])
+ module_args = {'junction_path': '/this/path'}
+ msg = 'Error mounting volume test_svm with path "/this/path": calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error.'
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_change_volume_state():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['success']), # Change volume state
+ ])
+ module_args = {'is_online': False}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_change_volume_state():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Change volume state
+ ])
+ module_args = {'is_online': False}
+ msg = "Error changing state of volume test_svm: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_modify_attributes():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Modify
+ ])
+ module_args = {
+ 'space_guarantee': 'volume',
+ 'percent_snapshot_space': 10,
+ 'snapshot_policy': 'default2',
+ 'export_policy': 'default2',
+ 'group_id': 5,
+ 'user_id': 5,
+ 'volume_security_style': 'mixed',
+ 'comment': 'carchi8py was here',
+ 'tiering_minimum_cooling_days': 10,
+ 'logical_space_enforcement': True,
+ 'logical_space_reporting': True
+ }
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_modify_attributes():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Modify
+ ])
+ module_args = {
+ 'space_guarantee': 'volume',
+ 'percent_snapshot_space': 10,
+ 'snapshot_policy': 'default2',
+ 'export_policy': 'default2',
+ 'group_id': 5,
+ 'user_id': 5,
+ 'volume_security_style': 'mixed',
+ 'comment': 'carchi8py was here',
+ }
+ msg = "Error modifying volume test_svm: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_create_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ])
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS)['changed']
+
+
+def test_rest_error_get_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['generic_error']), # Get Volume
+ ])
+ msg = "calling: storage/volumes: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, fail=True)['msg'] == msg
+
+
+def test_rest_error_create_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['generic_error']), # Create Volume
+ ])
+ msg = "Error creating volume test_svm: calling: storage/volumes: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_create_volume_with_options():
+ module_args = {
+ 'space_guarantee': 'volume',
+ 'percent_snapshot_space': 5,
+ 'snapshot_policy': 'default',
+ 'export_policy': 'default',
+ 'group_id': 0,
+ 'user_id': 0,
+ 'volume_security_style': 'unix',
+ 'comment': 'carchi8py',
+ 'type': 'RW',
+ 'language': 'en',
+ 'encrypt': True,
+ 'junction_path': '/this/path',
+ 'tiering_policy': 'backup',
+ 'tiering_minimum_cooling_days': 10,
+ }
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ # TODO - force a patch after create
+ # ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # modify Volume
+ ])
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS)['changed']
+
+
+def test_rest_successfully_snapshot_restore_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Modify Snapshot restore
+ ])
+ module_args = {'snapshot_restore': 'snapshot_copy'}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_snapshot_restore_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Modify Snapshot restore
+ ])
+ module_args = {'snapshot_restore': 'snapshot_copy'}
+ msg = "Error restoring snapshot snapshot_copy in volume test_svm: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_error_snapshot_restore_volume_no_parent():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['zero_records']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ])
+ module_args = {'snapshot_restore': 'snapshot_copy'}
+ msg = "Error restoring volume: cannot find parent: test_svm"
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_rename_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume name
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume from
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume from
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Patch
+ ])
+ module_args = {
+ 'from_name': 'test_svm',
+ 'name': 'new_name'
+ }
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_rename_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume name
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume from
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume from
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Patch
+ ])
+ module_args = {
+ 'from_name': 'test_svm',
+ 'name': 'new_name'
+ }
+ msg = "Error changing name of volume new_name: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_error_resizing_volume():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']), # Get Volume name
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Resize volume
+ ])
+ module_args = {
+ 'sizing_method': 'add_new_resources',
+ 'size': 20737418240
+ }
+ msg = "Error resizing volume test_svm: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_create_volume_with_unix_permissions():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # add unix permissions
+ ])
+ module_args = {'unix_permissions': '---rw-r-xr-x'}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_create_volume_with_qos_policy():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Set policy name
+ ])
+ module_args = {'qos_policy_group': 'policy-name'}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_create_volume_with_qos_adaptive_policy_group():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Set policy name
+ ])
+ module_args = {'qos_adaptive_policy_group': 'policy-name'}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_create_volume_with_qos_adaptive_policy_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ module_args = {
+ 'qos_adaptive_policy_group': 'policy-name',
+ 'qos_policy_group': 'policy-name'
+ }
+ msg = "Error: With Rest API qos_policy_group and qos_adaptive_policy_group are now the same thing, and cannot be set at the same time"
+ assert create_module(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_create_volume_with_tiering_policy():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Set Tiering_policy
+ ])
+ module_args = {'tiering_policy': 'all'}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_create_volume_encrypt():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Set Encryption
+ ])
+ module_args = {'encrypt': False}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_rest_successfully_modify_volume_encrypt(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_encrypt_off']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # Set Encryption
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_encrypt_off']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']),
+ ('GET', 'storage/volumes', SRR['encrypting']),
+ ('GET', 'storage/volumes', SRR['encrypting']),
+ ('GET', 'storage/volumes', SRR['encrypting']),
+ ('GET', 'storage/volumes', SRR['encrypted']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_encrypt_off']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume'])
+ ])
+ module_args = {'encrypt': True}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+ module_args = {'encrypt': True, 'wait_for_completion': True}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+ error = 'Error getting volume encryption_conversion status'
+ assert error in create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg']
+ error = 'unencrypting volume is only supported when moving the volume to another aggregate in REST'
+ assert error in create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, {'encrypt': False}, fail=True)['msg']
+
+
+def test_rest_error_modify_volume_encrypt():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_encrypt_off']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Set Encryption
+ ])
+ module_args = {'encrypt': True}
+ msg = "Error enabling encryption for volume test_svm: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_successfully_modify_volume_compression():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_encrypt_off']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # compression
+ ])
+ module_args = {
+ 'efficiency_policy': 'test',
+ 'compression': True
+ }
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_successfully_modify_volume_inline_compression():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_encrypt_off']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # compression
+ ])
+ module_args = {
+ 'efficiency_policy': 'test',
+ 'inline_compression': True
+ }
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_modify_volume_efficiency_policy():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_encrypt_off']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Set efficiency
+ ])
+ module_args = {'efficiency_policy': 'test'}
+ msg = "Error setting efficiency for volume test_svm: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_error_volume_compression_both():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume_encrypt_off']), # Get Volume
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['generic_error']), # Set efficiency
+ ])
+ module_args = {
+ 'compression': True,
+ 'inline_compression': True
+ }
+ msg = "Error setting efficiency for volume test_svm: calling: storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_error_modify_volume_efficiency_policy_with_ontap_96():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ module_args = {'efficiency_policy': 'test'}
+ msg = "Error: Minimum version of ONTAP for efficiency_policy is (9, 7)."
+ assert msg in create_module(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_error_modify_volume_tiering_minimum_cooling_days_98():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ module_args = {'tiering_minimum_cooling_days': 2}
+ msg = "Error: Minimum version of ONTAP for tiering_minimum_cooling_days is (9, 8)."
+ assert msg in create_module(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg']
+
+
+def test_rest_successfully_created_with_logical_space():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']), # GET svm
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ])
+ module_args = {
+ 'logical_space_enforcement': False,
+ 'logical_space_reporting': False
+ }
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_rest_error_modify_backend_fabricpool():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('GET', 'storage/aggregates/aggr1_uuid/cloud-stores', SRR['no_record']), # get_aggr_object_stores
+ ])
+ module_args = {
+ 'nas_application_template': {'tiering': {'control': 'required'}},
+ 'feature_flags': {'warn_or_fail_on_fabricpool_backend_change': 'fail'}
+ }
+
+ msg = "Error: changing a volume from one backend to another is not allowed. Current tiering control: disallowed, desired: required."
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args, fail=True)['msg'] == msg
+
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('GET', 'application/applications', SRR['no_record']), # TODO: modify
+ ])
+ module_args['feature_flags'] = {'warn_or_fail_on_fabricpool_backend_change': 'invalid'}
+ assert not create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+ print_warnings()
+ warning = "Unexpected value 'invalid' for warn_or_fail_on_fabricpool_backend_change, expecting: None, 'ignore', 'fail', 'warn'"
+ assert_warning_was_raised(warning)
+
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('GET', 'storage/aggregates/aggr1_uuid/cloud-stores', SRR['no_record']), # get_aggr_object_stores
+ ('GET', 'application/applications', SRR['no_record']), # TODO: modify
+ ])
+ module_args['feature_flags'] = {'warn_or_fail_on_fabricpool_backend_change': 'warn'}
+ assert not create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+ warning = "Ignored %s" % msg
+ print_warnings()
+ assert_warning_was_raised(warning)
+
+
+def test_rest_negative_modify_backend_fabricpool():
+ ''' fail to get aggregate object store'''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('GET', 'storage/aggregates/aggr1_uuid/cloud-stores', SRR['generic_error']),
+ ])
+ module_args = {
+ 'nas_application_template': {'tiering': {'control': 'required'}},
+ 'feature_flags': {'warn_or_fail_on_fabricpool_backend_change': 'fail'}
+ }
+ msg = "Error getting object store for aggregate: aggr1: calling: storage/aggregates/aggr1_uuid/cloud-stores: got Expected error."
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args, fail=True)['msg'] == msg
+
+
+def test_rest_tiering_control():
+ ''' The volume is backed by one or more aggregates.
+ If every aggregate is associated with one or more object stores, the volume has a FabricPool backend ('required').
+ If no aggregate is associated with an object store, tiering control is 'disallowed'; a mix of backends yields 'best_effort'.
+ '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/aggregates/uuid1/cloud-stores', SRR['no_record']), # get_aggr_object_stores aggr1
+ ('GET', 'storage/aggregates/uuid2/cloud-stores', SRR['no_record']), # get_aggr_object_stores aggr2
+ ('GET', 'storage/aggregates/uuid1/cloud-stores', SRR['get_aggr_one_object_store']), # get_aggr_object_stores aggr1
+ ('GET', 'storage/aggregates/uuid2/cloud-stores', SRR['no_record']), # get_aggr_object_stores aggr2
+ ('GET', 'storage/aggregates/uuid1/cloud-stores', SRR['get_aggr_two_object_stores']), # get_aggr_object_stores aggr1
+ ('GET', 'storage/aggregates/uuid2/cloud-stores', SRR['get_aggr_one_object_store']), # get_aggr_object_stores aggr2
+ ])
+ module_args = {
+ 'nas_application_template': {'tiering': {'control': 'required'}},
+ 'feature_flags': {'warn_or_fail_on_fabricpool_backend_change': 'fail'}
+ }
+ current = {'aggregates': [{'name': 'aggr1', 'uuid': 'uuid1'}, {'name': 'aggr2', 'uuid': 'uuid2'}]}
+ vol_object = create_module(volume_module, DEFAULT_APP_ARGS, module_args)
+ result = vol_object.tiering_control(current)
+ assert result == 'disallowed'
+ result = vol_object.tiering_control(current)
+ assert result == 'best_effort'
+ result = vol_object.tiering_control(current)
+ assert result == 'required'
+ current = {'aggregates': []}
+ result = vol_object.tiering_control(current)
+ assert result is None
+
+
+def test_error_snaplock_volume_create_sl_type_not_changed():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('POST', 'storage/volumes', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ])
+ module_args = {'snaplock': {'type': 'enterprise'}}
+ error = 'Error: volume snaplock type was not set properly at creation time. Current: non_snaplock, desired: enterprise.'
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_error_snaplock_volume_create_sl_type_not_supported():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ])
+ module_args = {'snaplock': {'type': 'enterprise'}}
+ error = 'Error: using snaplock type requires ONTAP 9.10.1 or later and REST must be enabled - ONTAP version: 9.6.0 - using REST.'
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_error_snaplock_volume_create_sl_options_not_supported_when_non_snaplock():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ])
+ module_args = {'snaplock': {
+ 'type': 'non_snaplock',
+ 'retention': {'default': 'P30Y'}
+ }}
+ error = "Error: snaplock options are not supported for non_snaplock volume, found: {'retention': {'default': 'P30Y'}}."
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == error
+
+ # 'non_snaplock' is the default too
+ module_args = {'snaplock': {
+ 'retention': {'default': 'P30Y'}
+ }}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_snaplock_volume_create():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['empty_records']),
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('POST', 'storage/volumes', SRR['empty_records']),
+ ('GET', 'storage/volumes', SRR['get_volume_sl_enterprise']),
+ ])
+ module_args = {'snaplock': {'type': 'enterprise', 'retention': {'maximum': 'P5D'}}}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_error_snaplock_volume_modify_type():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['get_volume_sl_enterprise']),
+ ])
+ module_args = {'snaplock': {'type': 'compliance'}}
+ error = 'Error: changing a volume snaplock type after creation is not allowed. Current: enterprise, desired: compliance.'
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_snaplock_volume_modify_other_options():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['get_volume_sl_enterprise']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['success']),
+ ])
+ module_args = {'snaplock': {
+ 'retention': {'default': 'P20Y'}
+ }}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_snaplock_volume_modify_other_options_idempotent():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['get_volume_sl_enterprise']),
+ ])
+ module_args = {'snaplock': {
+ 'retention': {'default': 'P30Y'}
+ }}
+ assert not create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+def test_max_files_volume_modify():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['get_volume_sl_enterprise']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['success']),
+ ])
+ module_args = {'max_files': 3000}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, module_args)['changed']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_use_zapi_and_netapp_lib_missing(mock_has_netapp_lib):
+ """ZAPI requires netapp_lib"""
+ register_responses([
+ ])
+ mock_has_netapp_lib.return_value = False
+ module_args = {'use_rest': 'never'}
+ error = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert create_module(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_fallback_to_zapi_and_nas_application_is_used():
+ """fallback to ZAPI when use_rest: auto and some ZAPI only options are used"""
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {'use_rest': 'auto', 'cutover_action': 'wait', 'nas_application_template': {'storage_service': 'value'}}
+ error = "Error: nas_application_template requires REST support. use_rest: auto. "\
+ "Conflict because of unsupported option(s) or option value(s) in REST: ['cutover_action']."
+ assert create_module(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == error
+ assert_warning_was_raised("Falling back to ZAPI because of unsupported option(s) or option value(s) in REST: ['cutover_action']")
+
+
+def test_fallback_to_zapi_and_rest_option_is_used():
+ """fallback to ZAPI when use_rest: auto and some ZAPI only options are used"""
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {'use_rest': 'auto', 'cutover_action': 'wait', 'sizing_method': 'use_existing_resources'}
+ error = "Error: sizing_method option is not supported with ZAPI. It can only be used with REST. use_rest: auto. "\
+ "Conflict because of unsupported option(s) or option value(s) in REST: ['cutover_action']."
+ assert create_module(volume_module, DEFAULT_VOLUME_ARGS, module_args, fail=True)['msg'] == error
+ assert_warning_was_raised("Falling back to ZAPI because of unsupported option(s) or option value(s) in REST: ['cutover_action']")
+
+
+def test_error_conflict_export_policy_and_nfs_access():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {
+ 'export_policy': 'auto',
+ 'nas_application_template': {
+ 'tiering': None,
+ 'nfs_access': [{'access': 'ro'}]
+ },
+ 'tiering_policy': 'backup'
+ }
+ error = 'Conflict: export_policy option and nfs_access suboption in nas_application_template are mutually exclusive.'
+ assert create_module(volume_module, DEFAULT_APP_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_create_nas_app_nfs_access():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ('POST', 'application/applications', SRR['empty_good']), # POST application/applications
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['get_volume']),
+ ])
+ module_args = {
+ 'nas_application_template': {
+ 'exclude_aggregates': ['aggr_ex'],
+ 'nfs_access': [{'access': 'ro'}],
+ 'tiering': None,
+ },
+ 'snapshot_policy': 'snspol'
+ }
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+
+
+def test_create_nas_app_tiering_object_store():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ('POST', 'application/applications', SRR['empty_good']), # POST application/applications
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('GET', 'storage/aggregates/aggr1_uuid/cloud-stores', SRR['get_aggr_one_object_store']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['get_volume']),
+ ])
+ module_args = {
+ 'nas_application_template': {
+ 'flexcache': {
+ 'dr_cache': True,
+ 'origin_component_name': 'ocn',
+ 'origin_svm_name': 'osn',
+ },
+ 'storage_service': 'extreme',
+ 'tiering': {
+ 'control': 'required',
+ 'object_stores': ['obs1']
+ },
+ },
+ 'export_policy': 'exppol',
+ 'qos_policy_group': 'qospol',
+ 'snapshot_policy': 'snspol'
+ }
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+
+
+def test_create_nas_app_tiering_policy_flexcache():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ('POST', 'application/applications', SRR['empty_good']), # POST application/applications
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['get_volume']),
+ ])
+ module_args = {
+ 'nas_application_template': {
+ 'flexcache': {
+ 'dr_cache': True,
+ 'origin_component_name': 'ocn',
+ 'origin_svm_name': 'osn',
+ },
+ 'storage_service': 'extreme',
+ },
+ 'qos_policy_group': 'qospol',
+ 'snapshot_policy': 'snspol',
+ 'tiering_policy': 'snapshot-only',
+ }
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+
+
+def test_create_nas_app_tiering_flexcache():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('GET', 'application/applications', SRR['no_record']), # GET application/applications
+ ('POST', 'application/applications', SRR['empty_good']), # POST application/applications
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['get_volume']),
+ ])
+ module_args = {
+ 'nas_application_template': {
+ 'flexcache': {
+ 'dr_cache': True,
+ 'origin_component_name': 'ocn',
+ 'origin_svm_name': 'osn',
+ },
+ 'storage_service': 'extreme',
+ 'tiering': {
+ 'control': 'best_effort'
+ },
+ },
+ 'qos_policy_group': 'qospol',
+ 'snapshot_policy': 'snspol'
+ }
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+
+
+def test_version_error_nas_app():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ])
+ module_args = {
+ 'nas_application_template': {
+ 'flexcache': {
+ 'dr_cache': True,
+ 'origin_component_name': 'ocn',
+ 'origin_svm_name': 'osn',
+ },
+ },
+ }
+ error = 'Error: using nas_application_template requires ONTAP 9.7 or later and REST must be enabled - ONTAP version: 9.6.0.'
+ assert create_module(volume_module, DEFAULT_APP_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_version_error_nas_app_dr_cache():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ module_args = {
+ 'nas_application_template': {
+ 'flexcache': {
+ 'dr_cache': True,
+ 'origin_component_name': 'ocn',
+ 'origin_svm_name': 'osn',
+ },
+ },
+ }
+ error = 'Error: using flexcache: dr_cache requires ONTAP 9.9 or later and REST must be enabled - ONTAP version: 9.8.0.'
+ assert create_module(volume_module, DEFAULT_APP_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_error_volume_rest_patch():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ my_obj = create_module(volume_module, DEFAULT_APP_ARGS)
+ my_obj.parameters['uuid'] = None
+ error = 'Could not read UUID for volume test_svm in patch.'
+ assert expect_and_capture_ansible_exception(my_obj.volume_rest_patch, 'fail', {})['msg'] == error
+
+
+def test_error_volume_rest_delete():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ my_obj = create_module(volume_module, DEFAULT_APP_ARGS)
+ my_obj.parameters['uuid'] = None
+ error = 'Could not read UUID for volume test_svm in delete.'
+ assert expect_and_capture_ansible_exception(my_obj.rest_delete_volume, 'fail', '')['msg'] == error
+
+
+def test_error_modify_app_not_supported_no_volume_but_app():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['no_record']),
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('GET', 'application/applications', SRR['nas_app_record']),
+ ('GET', 'application/applications/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['nas_app_record_by_uuid']),
+ ])
+ module_args = {}
+ # TODO: we need to handle this error case with a better error message
+ error = \
+ 'Error in create_nas_application: function create_application should not be called when application uuid is set: 09e9fd5e-8ebd-11e9-b162-005056b39fe7.'
+ assert create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args, fail=True)['msg'] == error
+
+
+def test_warning_modify_app_not_supported():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('GET', 'application/applications', SRR['nas_app_record']),
+ ('GET', 'application/applications/09e9fd5e-8ebd-11e9-b162-005056b39fe7', SRR['nas_app_record_by_uuid']),
+ ])
+ module_args = {
+ 'nas_application_template': {
+ 'flexcache': {
+ 'dr_cache': True,
+ 'origin_component_name': 'ocn',
+ 'origin_svm_name': 'osn',
+ },
+ },
+ }
+ assert not create_and_apply(volume_module, DEFAULT_APP_ARGS, module_args)['changed']
+ assert_warning_was_raised("Modifying an app is not supported at present: ignoring: {'flexcache': {'origin': {'svm': {'name': 'osn'}}}}")
+
+
+def test_create_flexgroup_volume_from_main():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']), # Get Volume
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('POST', 'storage/volumes', SRR['no_record']), # Create Volume
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # eff policy
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # modify
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['no_record']), # offline
+ ])
+ args = copy.deepcopy(DEFAULT_VOLUME_ARGS)
+ del args['aggregate_name']
+ module_args = {
+ 'aggr_list': 'aggr_0,aggr_1',
+ 'aggr_list_multiplier': 2,
+ 'comment': 'some comment',
+ 'compression': False,
+ 'efficiency_policy': 'effpol',
+ 'export_policy': 'exppol',
+ 'group_id': 1001,
+ 'junction_path': '/this/path',
+ 'inline_compression': False,
+ 'is_online': False,
+ 'language': 'us',
+ 'percent_snapshot_space': 10,
+ 'snapshot_policy': 'snspol',
+ 'space_guarantee': 'file',
+ 'tiering_minimum_cooling_days': 30,
+ 'tiering_policy': 'snapshot-only',
+ 'type': 'rw',
+ 'user_id': 123,
+ 'volume_security_style': 'unix',
+ }
+ assert call_main(my_main, args, module_args)['changed']
+
+
+def test_get_volume_style():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ])
+ args = copy.deepcopy(DEFAULT_VOLUME_ARGS)
+ del args['aggregate_name']
+ module_args = {
+ 'auto_provision_as': 'flexgroup',
+ }
+ my_obj = create_module(volume_module, args, module_args)
+ assert my_obj.get_volume_style(None) == 'flexgroup'
+ assert my_obj.parameters.get('aggr_list_multiplier') == 1
+
+
+def test_move_volume_with_rest_passthrough():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('PATCH', 'private/cli/volume/move/start', SRR['success']),
+ ('PATCH', 'private/cli/volume/move/start', SRR['generic_error']),
+ ])
+ module_args = {
+ 'aggregate_name': 'aggr2'
+ }
+ obj = create_module(volume_module, DEFAULT_VOLUME_ARGS, module_args)
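+ # first PATCH on private/cli/volume/move/start succeeds, the second returns the registered generic error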
+ error = obj.move_volume_with_rest_passthrough(True)
+ assert error is None
+ error = obj.move_volume_with_rest_passthrough(True)
+ assert 'Expected error' in error
+
+
+def test_ignore_small_change():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ obj = create_module(volume_module, DEFAULT_VOLUME_ARGS)
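+ # 51 vs 50 is a 2% difference, above the 0.5% threshold, so the requested value is kept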
+ obj.parameters['attribute'] = 51
+ assert obj.ignore_small_change({'attribute': 50}, 'attribute', .5) is None
+ assert obj.parameters['attribute'] == 51
+ assert_no_warnings()
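+ # 50.2 vs 50 is only a 0.4% difference, below the threshold, so the change is ignored and a warning is raised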
+ obj.parameters['attribute'] = 50.2
+ assert obj.ignore_small_change({'attribute': 50}, 'attribute', .5) is None
+ assert obj.parameters['attribute'] == 50
+ print_warnings()
+ assert_warning_was_raised('resize request for attribute ignored: 0.4% is below the threshold: 0.5%')
+
+
+def test_set_efficiency_rest_empty_body():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ])
+ obj = create_module(volume_module, DEFAULT_VOLUME_ARGS)
+ # no action
+ assert obj.set_efficiency_rest() is None
+
+
+@patch('time.sleep')
+def test_volume_move_rest(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'storage/volumes', SRR['get_volume_mount']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['success']),
+ ('GET', 'storage/volumes', SRR['move_state_replicating']),
+ ('GET', 'storage/volumes', SRR['move_state_success']),
+ # error when trying to get volume status
+ ('GET', 'cluster', SRR['is_rest_9_8_0']),
+ ('GET', 'storage/volumes', SRR['get_volume_mount']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['success']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error']),
+ ('GET', 'storage/volumes', SRR['generic_error'])
+ ])
+ args = {'aggregate_name': 'aggr2', 'wait_for_completion': True, 'max_wait_time': 280}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, args)['changed']
+ error = "Error getting volume move status"
+ assert error in create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, args, fail=True)['msg']
+
+
+def test_analytics_option():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['no_record']),
+ ('GET', 'svm/svms', SRR['one_svm_record']),
+ ('POST', 'storage/volumes', SRR['success']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ # idempotency check
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ # Disable analytics
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['get_volume']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['success']),
+ # Enable analytics
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['analytics_off']),
+ ('PATCH', 'storage/volumes/7882901a-1aef-11ec-a267-005056b30cfa', SRR['success']),
+ # Try to enable analytics while it is still initializing (no change required)
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['analytics_initializing'])
+ ])
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, {'analytics': 'on'})['changed']
+ assert not create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, {'analytics': 'on'})['changed']
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, {'analytics': 'off'})['changed']
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, {'analytics': 'on'})['changed']
+ assert not create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, {'analytics': 'on'})['changed']
+
+
+def test_warn_rest_modify():
+ """ Test skip snapshot_restore and modify when volume is offline """
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'storage/volumes', SRR['volume_info_offline'])
+ ])
+ args = {'is_online': False, 'junction_path': '/test', 'use_rest': 'always', 'snapshot_restore': 'restore1'}
+ assert create_and_apply(volume_module, DEFAULT_VOLUME_ARGS, args)['changed'] is False
+ assert_warning_was_raised("Cannot perform action(s): ['snapshot_restore'] and modify: ['junction_path']", partial_match=True)
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_snaplock.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_snaplock.py
new file mode 100644
index 000000000..0c836233f
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_volume_snaplock.py
@@ -0,0 +1,131 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+""" unit tests for Ansible module: na_ontap_volume_snaplock """
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume_snaplock \
+ import NetAppOntapVolumeSnaplock as snaplock_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.type = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.type == 'snaplock':
+ xml = self.build_snaplock_info(self.params)
+ elif self.type == 'zapi_error':
+ error = netapp_utils.zapi.NaApiError('test', 'error')
+ raise error
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_snaplock_info(data):
+ ''' build xml data for snaplock attributes '''
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {'snaplock-attrs': {
+ 'snaplock-attrs-info': {
+ 'autocommit-period': data['autocommit_period'],
+ 'default-retention-period': data['default_retention_period'],
+ 'maximum-retention-period': data['maximum_retention_period'],
+ 'minimum-retention-period': data['minimum_retention_period'],
+ 'is-volume-append-mode-enabled': data['is_volume_append_mode_enabled']
+ }
+ }}
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' a group of related Unit Tests '''
+
+ def setUp(self):
+ self.mock_snaplock = {
+ 'autocommit_period': '10days',
+ 'default_retention_period': '1years',
+ 'maximum_retention_period': '2years',
+ 'minimum_retention_period': '6months',
+ 'is_volume_append_mode_enabled': 'false'
+ }
+
+ def mock_args(self):
+ return {
+ 'name': 'test_volume',
+ 'autocommit_period': self.mock_snaplock['autocommit_period'],
+ 'default_retention_period': self.mock_snaplock['default_retention_period'],
+ 'maximum_retention_period': self.mock_snaplock['maximum_retention_period'],
+ 'minimum_retention_period': self.mock_snaplock['minimum_retention_period'],
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'test_vserver'
+ }
+
+ def get_snaplock_mock_object(self, kind=None):
+ """
+ Helper method to return an na_ontap_volume_snaplock object
+ :param kind: passes this param to MockONTAPConnection()
+ :return: na_ontap_volume_snaplock object
+ """
+ snaplock_obj = snaplock_module()
+ if kind is None:
+ snaplock_obj.server = MockONTAPConnection()
+ else:
+ snaplock_obj.server = MockONTAPConnection(kind=kind, data=self.mock_snaplock)
+ return snaplock_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ snaplock_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_get_existing_snaplock(self):
+ set_module_args(self.mock_args())
+ result = self.get_snaplock_mock_object(kind='snaplock').get_volume_snaplock_attrs()
+ assert result['autocommit_period'] == self.mock_snaplock['autocommit_period']
+ assert result['default_retention_period'] == self.mock_snaplock['default_retention_period']
+ assert result['is_volume_append_mode_enabled'] is False
+ assert result['maximum_retention_period'] == self.mock_snaplock['maximum_retention_period']
+
+ def test_modify_snaplock(self):
+ data = self.mock_args()
+ data['maximum_retention_period'] = '5years'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_snaplock_mock_object('snaplock').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_volume_snaplock.NetAppOntapVolumeSnaplock.get_volume_snaplock_attrs')
+ def test_modify_snaplock_error(self, get_volume_snaplock_attrs):
+ data = self.mock_args()
+ data['maximum_retention_period'] = '5years'
+ set_module_args(data)
+ get_volume_snaplock_attrs.side_effect = [self.mock_snaplock]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_snaplock_mock_object('zapi_error').apply()
+ assert exc.value.args[0]['msg'] == 'Error setting snaplock attributes for volume test_volume : NetApp API failed. Reason - test:error'
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan.py
new file mode 100644
index 000000000..924865507
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan.py
@@ -0,0 +1,200 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_vscan '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vscan \
+ import NetAppOntapVscan as vscan_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+HAS_NETAPP_ZAPI_MSG = "pip install netapp_lib is required"
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Ooops, the UT needs one more SRR response"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'enabled': (200, {'records': [{'enabled': True, 'svm': {'uuid': 'testuuid'}}]}, None),
+ 'disabled': (200, {'records': [{'enabled': False, 'svm': {'uuid': 'testuuid'}}]}, None),
+}
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'enable':
+ xml = self.build_vscan_status_info(self.params)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_vscan_status_info(status):
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {'num-records': 1,
+ 'attributes-list': {'vscan-status-info': {'is-vscan-enabled': status}}}
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_vscan '''
+
+ def mock_args(self):
+ return {
+ 'enable': False,
+ 'vserver': 'vserver',
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_vscan_mock_object(self, cx_type='zapi', kind=None, status=None):
+ vscan_obj = vscan_module()
+ if cx_type == 'zapi':
+ if kind is None:
+ vscan_obj.server = MockONTAPConnection()
+ else:
+ vscan_obj.server = MockONTAPConnection(kind=kind, data=status)
+ # For rest, mocking is achieved through side_effect
+ return vscan_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ vscan_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_successfully_enable(self):
+ data = self.mock_args()
+ data['enable'] = True
+ data['use_rest'] = 'never'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_vscan_mock_object('zapi', 'enable', 'false').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_idempotently_enable(self):
+ data = self.mock_args()
+ data['enable'] = True
+ data['use_rest'] = 'never'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_vscan_mock_object('zapi', 'enable', 'true').apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successfully_disable(self):
+ data = self.mock_args()
+ data['enable'] = False
+ data['use_rest'] = 'never'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_vscan_mock_object('zapi', 'enable', 'true').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_idempotently_disable(self):
+ data = self.mock_args()
+ data['enable'] = False
+ data['use_rest'] = 'never'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_vscan_mock_object('zapi', 'enable', 'false').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_error(self, mock_request):
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_vscan_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['msg'] == SRR['generic_error'][2]
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successfully_enable(self, mock_request):
+ data = self.mock_args()
+ data['enable'] = True
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['disabled'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_vscan_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_idempotently_enable(self, mock_request):
+ data = self.mock_args()
+ data['enable'] = True
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['enabled'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_vscan_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successfully_disable(self, mock_request):
+ data = self.mock_args()
+ data['enable'] = False
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['enabled'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_vscan_mock_object(cx_type='rest').apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_idempotently_disable(self, mock_request):
+ data = self.mock_args()
+ data['enable'] = False
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['disabled'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_vscan_mock_object(cx_type='rest').apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_access_policy.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_access_policy.py
new file mode 100644
index 000000000..d5228c1cc
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_access_policy.py
@@ -0,0 +1,348 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_vscan_on_access_policy '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import patch_ansible,\
+ create_module, create_and_apply, expect_and_capture_ansible_exception
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vscan_on_access_policy \
+ import NetAppOntapVscanOnAccessPolicy as policy_module # module under test
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
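+# module arguments shared by the ZAPI test cases below (use_rest: never)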
+DEFAULT_ARGS = {
+ 'state': 'present',
+ 'vserver': 'test_vserver',
+ 'policy_name': 'test_carchi',
+ 'max_file_size': 2147483648 + 1, # 2GB + 1
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+}
+
+
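+# canned ZAPI attributes returned for vscan-on-access-policy-get-iter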
+vscan_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'vscan-on-access-policy-info': {
+ 'policy-name': 'test_carchi',
+ 'vserver': 'test_vserver',
+ 'max-file-size': 2147483648 + 1,
+ 'is-scan-mandatory': 'false',
+ 'scan-files-with-no-ext': 'true',
+ 'is-policy-enabled': 'true',
+ 'file-ext-to-include': ['py']
+ }
+ }
+}
+
+
+ZRR = zapi_responses({
+ 'vscan_info': build_zapi_response(vscan_info)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ # with python 2.6, dictionaries are not ordered
+ fragments = ["missing required arguments:", "hostname", "policy_name", "vserver"]
+ error = create_module(policy_module, {}, fail=True)['msg']
+ for fragment in fragments:
+ assert fragment in error
+
+
+def test_get_nonexistent_policy():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['empty'])
+ ])
+ policy_obj = create_module(policy_module, DEFAULT_ARGS)
+ result = policy_obj.get_on_access_policy()
+ assert result is None
+
+
+def test_get_existing_scanner():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['vscan_info'])
+ ])
+ policy_obj = create_module(policy_module, DEFAULT_ARGS)
+ result = policy_obj.get_on_access_policy()
+ assert result
+
+
+def test_successfully_create():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['empty']),
+ ('vscan-on-access-policy-create', ZRR['success'])
+ ])
+ assert create_and_apply(policy_module, DEFAULT_ARGS)['changed']
+
+
+def test_create_idempotency():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['vscan_info'])
+ ])
+ assert create_and_apply(policy_module, DEFAULT_ARGS)['changed'] is False
+
+
+def test_successfully_delete():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['vscan_info']),
+ ('vscan-on-access-policy-delete', ZRR['success'])
+ ])
+ assert create_and_apply(policy_module, DEFAULT_ARGS, {'state': 'absent'})['changed']
+
+
+def test_delete_idempotency():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['empty'])
+ ])
+ assert create_and_apply(policy_module, DEFAULT_ARGS, {'state': 'absent'})['changed'] is False
+
+
+def test_successfully_create_and_enable_policy():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['empty']),
+ ('vscan-on-access-policy-create', ZRR['success']),
+ ('vscan-on-access-policy-status-modify', ZRR['success'])
+ ])
+ args = {'policy_status': True}
+ assert create_and_apply(policy_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_disable_policy_and_delete():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['vscan_info']),
+ ('vscan-on-access-policy-status-modify', ZRR['success']),
+ ('vscan-on-access-policy-delete', ZRR['success'])
+ ])
+ args = {'policy_status': False, 'state': 'absent'}
+ assert create_and_apply(policy_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_modify_policy():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['vscan_info']),
+ ('vscan-on-access-policy-modify', ZRR['success'])
+ ])
+ args = {'max_file_size': 2147483650}
+ assert create_and_apply(policy_module, DEFAULT_ARGS, args)['changed']
+
+
+def test_modify_files_to_include_empty_error():
+ args = {'file_ext_to_include': []}
+ msg = 'Error: The value for file_ext_include cannot be empty'
+ assert msg in create_module(policy_module, DEFAULT_ARGS, args, fail=True)['msg']
+
+
+def test_module_error_disable_policy():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['vscan_info']),
+ ('vscan-on-access-policy-status-modify', ZRR['error'])
+ ])
+ args = {'policy_status': False}
+ error = create_and_apply(policy_module, DEFAULT_ARGS, args, fail=True)['msg']
+ assert 'Error modifying status Vscan on Access Policy' in error
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('vscan-on-access-policy-get-iter', ZRR['error']),
+ ('vscan-on-access-policy-create', ZRR['error']),
+ ('vscan-on-access-policy-modify', ZRR['error']),
+ ('vscan-on-access-policy-delete', ZRR['error']),
+ ])
+
+ policy_obj = create_module(policy_module, DEFAULT_ARGS)
+
+ error = expect_and_capture_ansible_exception(policy_obj.get_on_access_policy, 'fail')['msg']
+ assert 'Error searching Vscan on Access Policy' in error
+
+ error = expect_and_capture_ansible_exception(policy_obj.create_on_access_policy, 'fail')['msg']
+ assert 'Error creating Vscan on Access Policy' in error
+
+ error = expect_and_capture_ansible_exception(policy_obj.modify_on_access_policy, 'fail')['msg']
+ assert 'Error Modifying Vscan on Access Policy' in error
+
+ error = expect_and_capture_ansible_exception(policy_obj.delete_on_access_policy, 'fail')['msg']
+ assert 'Error Deleting Vscan on Access Policy' in error
+
+
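+# module arguments shared by the REST test cases below (use_rest: always)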
+DEFAULT_ARGS_REST = {
+ "policy_name": "custom_CIFS",
+ "policy_status": True,
+ "file_ext_to_exclude": ["exe", "yml", "py"],
+ "file_ext_to_include": ['txt', 'json'],
+ "scan_readonly_volumes": True,
+ "only_execute_access": False,
+ "is_scan_mandatory": True,
+ "paths_to_exclude": ['\folder1', '\folder2'],
+ "scan_files_with_no_ext": True,
+ "max_file_size": 2147483648,
+ "vserver": "vscan-test",
+ "hostname": "test",
+ "username": "test_user",
+ "password": "test_pass",
+ "use_rest": "always"
+}
+
+
+SRR = rest_responses({
+ 'vscan_on_access_policy': (200, {"records": [
+ {
+ "svm": {"name": "vscan-test"},
+ "name": "custom_CIFS",
+ "enabled": True,
+ "mandatory": True,
+ "scope": {
+ "max_file_size": 2147483648,
+ "exclude_paths": ["\folder1", "\folder2"],
+ "include_extensions": ["txt", "json"],
+ "exclude_extensions": ["exe", "yml", "py"],
+ "scan_without_extension": True,
+ "scan_readonly_volumes": True,
+ "only_execute_access": False
+ }
+ }
+ ], "num_records": 1}, None),
+ 'svm_uuid': (200, {"records": [
+ {
+ 'uuid': 'e3cb5c7f-cd20'
+ }], "num_records": 1}, None)
+})
+
+
+def test_successfully_create_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies', SRR['empty_records']),
+ ('POST', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies', SRR['success'])
+ ])
+ assert create_and_apply(policy_module, DEFAULT_ARGS_REST)['changed']
+
+
+def test_successfully_create_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies', SRR['vscan_on_access_policy'])
+ ])
+ assert create_and_apply(policy_module, DEFAULT_ARGS_REST)['changed'] is False
+
+
+def test_modify_policy_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies', SRR['vscan_on_access_policy']),
+ ('PATCH', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies/custom_CIFS', SRR['success'])
+ ])
+ args = {
+ "policy_status": False,
+ "file_ext_to_exclude": ['yml'],
+ "file_ext_to_include": ['json'],
+ "scan_readonly_volumes": False,
+ "only_execute_access": True,
+ "is_scan_mandatory": False,
+ "paths_to_exclude": ['\folder1'],
+ "scan_files_with_no_ext": False,
+ "max_file_size": 2147483649
+ }
+ assert create_and_apply(policy_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_disable_and_delete_policy_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies', SRR['vscan_on_access_policy']),
+ ('PATCH', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies/custom_CIFS', SRR['success']),
+ ('DELETE', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies/custom_CIFS', SRR['success'])
+ ])
+ args = {
+ 'state': 'absent',
+ 'policy_status': False
+ }
+ assert create_and_apply(policy_module, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_delete_idempotent():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies', SRR['empty_records'])
+ ])
+ args = {
+ 'state': 'absent'
+ }
+ assert create_and_apply(policy_module, DEFAULT_ARGS_REST, args)['changed'] is False
+
+
+def test_get_vserver_not_found():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['empty_records'])
+ ])
+ msg = 'Error vserver vscan-test does not exist or is not a data vserver.'
+ assert msg in create_and_apply(policy_module, DEFAULT_ARGS_REST, fail=True)['msg']
+
+
+def test_invalid_option_error_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0'])
+ ])
+ args = {'paths_to_exclude': [""]}
+ msg = 'Error: Invalid value specified for option(s)'
+ assert msg in create_module(policy_module, DEFAULT_ARGS_REST, args, fail=True)['msg']
+
+
+def test_get_error_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['svm_uuid']),
+ ('GET', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies', SRR['generic_error'])
+ ])
+ msg = 'Error searching Vscan on Access Policy'
+ assert msg in create_and_apply(policy_module, DEFAULT_ARGS_REST, fail=True)['msg']
+
+
+def test_if_all_methods_catch_exception_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/svms', SRR['generic_error']),
+ ('POST', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies', SRR['generic_error']),
+ ('PATCH', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies/custom_CIFS', SRR['generic_error']),
+ ('DELETE', 'protocols/vscan/e3cb5c7f-cd20/on-access-policies/custom_CIFS', SRR['generic_error'])
+ ])
+
+ policy_obj = create_module(policy_module, DEFAULT_ARGS_REST)
+ policy_obj.svm_uuid = "e3cb5c7f-cd20"
+
+ msg = 'calling: svm/svms: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(policy_obj.get_svm_uuid, 'fail')['msg']
+
+ msg = 'Error creating Vscan on Access Policy'
+ assert msg in expect_and_capture_ansible_exception(policy_obj.create_on_access_policy_rest, 'fail')['msg']
+
+ msg = 'Error Modifying Vscan on Access Policy'
+ assert msg in expect_and_capture_ansible_exception(policy_obj.modify_on_access_policy_rest, 'fail', {"policy_status": False})['msg']
+
+ msg = 'Error Deleting Vscan on Access Policy'
+ assert msg in expect_and_capture_ansible_exception(policy_obj.delete_on_access_policy_rest, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_demand_task.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_demand_task.py
new file mode 100644
index 000000000..8060cef9a
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_demand_task.py
@@ -0,0 +1,135 @@
+''' unit tests for Ansible module: na_ontap_vscan_on_demand_task '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vscan_on_demand_task \
+ import NetAppOntapVscanOnDemandTask as onDemand_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'task':
+ xml = self.build_onDemand_pool_info(self.params)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_onDemand_pool_info(onDemand_details):
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'vscan-on-demand-task-info': {
+ 'task-name': onDemand_details['task_name'],
+ 'report-directory': onDemand_details['report_directory'],
+ 'scan-paths': {
+ 'string': onDemand_details['scan_paths']
+ }
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_vscan_on_demand_task '''
+
+ def setUp(self):
+ self.mock_onDemand = {
+ 'state': 'present',
+ 'vserver': 'test_vserver',
+ 'report_directory': '/',
+ 'task_name': '/',
+ 'scan_paths': '/'
+ }
+
+ def mock_args(self):
+ return {
+ 'state': self.mock_onDemand['state'],
+ 'vserver': self.mock_onDemand['vserver'],
+ 'report_directory': self.mock_onDemand['report_directory'],
+ 'task_name': self.mock_onDemand['task_name'],
+ 'scan_paths': self.mock_onDemand['scan_paths'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'never'
+ }
+
+ def get_demand_mock_object(self, kind=None):
+ scanner_obj = onDemand_module()
+ scanner_obj.asup_log_for_cserver = Mock(return_value=None)
+ if kind is None:
+ scanner_obj.server = MockONTAPConnection()
+ else:
+ scanner_obj.server = MockONTAPConnection(kind='task', data=self.mock_onDemand)
+ return scanner_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ onDemand_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_get_nonexistent_demand_task(self):
+ set_module_args(self.mock_args())
+ result = self.get_demand_mock_object().get_demand_task()
+ assert not result
+
+ def test_get_existing_demand_task(self):
+ set_module_args(self.mock_args())
+ result = self.get_demand_mock_object('task').get_demand_task()
+ assert result
+
+ def test_successfully_create(self):
+ set_module_args(self.mock_args())
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_demand_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ def test_create_idempotency(self):
+ set_module_args(self.mock_args())
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_demand_mock_object('task').apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successfully_delete(self):
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_demand_mock_object('task').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_delete_idempotency(self):
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_demand_mock_object().apply()
+ assert not exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_demand_task_rest.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_demand_task_rest.py
new file mode 100644
index 000000000..0630bdff7
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_on_demand_task_rest.py
@@ -0,0 +1,184 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import get_mock_record, \
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vscan_on_demand_task \
+ import NetAppOntapVscanOnDemandTask as my_module, main as my_main # module under test
+
+# needed for get and modify/delete as they still use ZAPI
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+# REST API canned responses when mocking send_request
+SRR = rest_responses({
+ 'on_demand_task': (200, {"records": [
+ {
+ "log_path": "/vol0/report_dir",
+ "scan_paths": [
+ "/vol1/",
+ "/vol2/cifs/"
+ ],
+ "name": "task-1",
+ "svm": {
+ "name": "svm1",
+ "uuid": "02c9e252-41be-11e9-81d5-00a0986138f7"
+ },
+ "scope": {
+ "exclude_paths": [
+ "/vol1/cold-files/",
+ "/vol1/cifs/names"
+ ],
+ "scan_without_extension": True,
+ "include_extensions": [
+ "vmdk",
+ "mp*"
+ ],
+ "exclude_extensions": [
+ "mp3",
+ "mp4"
+ ],
+ "max_file_size": "10737418240"
+ },
+ "schedule": {
+ "name": "weekly",
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412"
+ }
+ }
+ ]}, None),
+ 'svm_info': (200, {
+ "uuid": "1cd8a442-86d1-11e0-ae1c-123478563412",
+ "name": "svm1",
+ }, None),
+})
+
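+# module arguments shared by all test cases in this file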
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'svm1',
+ 'use_rest': 'always',
+ 'task_name': 'carchi8pytask',
+ 'scan_paths': ['/vol/vol1/'],
+ 'report_directory': '/',
+}
+
+
+def test_get_svm_uuid():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['svm_info'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
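+ # the uuid from svm_info is kept on the module object for later REST paths; the method itself returns None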
+ assert my_obj.get_svm_uuid() is None
+
+
+def test_get_svm_uuid_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['generic_error'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ msg = 'Error fetching svm uuid: calling: svm/svms: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_obj.get_svm_uuid, 'fail')['msg']
+
+
+def test_get_vscan_on_demand_task_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies', SRR['empty_records'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ my_obj.svm_uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ assert my_obj.get_demand_task_rest() is None
+
+
+def test_get_vscan_on_demand_task_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies', SRR['generic_error'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ my_obj.svm_uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ msg = 'Error fetching on demand task carchi8pytask: calling: protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_obj.get_demand_task_rest, 'fail')['msg']
+
+
+def test_create_vscan_on_demand_task():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['svm_info']),
+ ('GET', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies', SRR['empty_records']),
+ ('POST', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies', SRR['empty_good'])
+ ])
+ assert create_and_apply(my_module, DEFAULT_ARGS, {})['changed']
+
+
+def test_create_vscan_on_demand_task_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('POST', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies', SRR['generic_error'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ my_obj.svm_uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ msg = 'Error creating on demand task carchi8pytask: calling: protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_obj.create_demand_task_rest, 'fail')['msg']
+
+
+def test_create_vscan_on_demand_task_with_all_options():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['svm_info']),
+ ('GET', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies', SRR['empty_records']),
+ ('POST', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies', SRR['empty_good'])
+ ])
+ module_args = {'file_ext_to_exclude': ['mp3', 'mp4'],
+ 'file_ext_to_include': ['vmdk', 'mp*'],
+ 'max_file_size': '10737418240',
+ 'paths_to_exclude': ['/vol1/cold-files/', '/vol1/cifs/names'],
+ 'schedule': 'weekly'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_vscan_on_demand_task():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('GET', 'svm/svms', SRR['svm_info']),
+ ('GET', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies', SRR['on_demand_task']),
+ ('DELETE', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies/carchi8pytask', SRR['empty_good'])
+ ])
+ module_args = {'state': 'absent'}
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_delete_vscan_on_demand_task_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest']),
+ ('DELETE', 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies/carchi8pytask', SRR['generic_error'])
+ ])
+ set_module_args(DEFAULT_ARGS)
+ my_obj = my_module()
+ my_obj.svm_uuid = '1cd8a442-86d1-11e0-ae1c-123478563412'
+ msg = 'Error deleting on demand task carchi8pytask: calling: ' + \
+ 'protocols/vscan/1cd8a442-86d1-11e0-ae1c-123478563412/on-demand-policies/carchi8pytask: got Expected error.'
+ assert msg in expect_and_capture_ansible_exception(my_obj.delete_demand_task_rest, 'fail')['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_scanner_pool.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_scanner_pool.py
new file mode 100644
index 000000000..b80e01e82
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vscan_scanner_pool.py
@@ -0,0 +1,154 @@
+# (c) 2018, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_vscan_scanner_pool '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import Mock
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vscan_scanner_pool \
+ import NetAppOntapVscanScannerPool as scanner_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+class MockONTAPConnection(object):
+ ''' mock server connection to ONTAP host '''
+
+ def __init__(self, kind=None, data=None):
+ ''' save arguments '''
+ self.kind = kind
+ self.params = data
+ self.xml_in = None
+ self.xml_out = None
+
+ def invoke_successfully(self, xml, enable_tunneling): # pylint: disable=unused-argument
+ ''' mock invoke_successfully returning xml data '''
+ self.xml_in = xml
+ if self.kind == 'scanner':
+ xml = self.build_scanner_pool_info(self.params)
+ self.xml_out = xml
+ return xml
+
+ @staticmethod
+ def build_scanner_pool_info(scanner_details):
+ xml = netapp_utils.zapi.NaElement('xml')
+ attributes = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'vscan-scanner-pool-info': {
+ 'scanner-pool': scanner_details['scanner_pool'],
+ 'scanner-policy': scanner_details['scanner_policy'],
+ 'hostnames': [
+ {'hostname': scanner_details['hostnames'][0]},
+ {'hostname': scanner_details['hostnames'][1]}
+ ],
+ 'privileged-users': [
+ {"privileged-user": sanner_details['privileged_users'][0]},
+ {"privileged-user": sanner_details['privileged_users'][1]}
+ ]
+ }
+ }
+ }
+ xml.translate_struct(attributes)
+ return xml
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_vscan_scanner_pool '''
+
+ def setUp(self):
+ self.mock_scanner = {
+ 'state': 'present',
+ 'scanner_pool': 'test_pool',
+ 'vserver': 'test_vserver',
+ 'hostnames': ['host1', 'host2'],
+ 'privileged_users': ['domain\\admin', 'domain\\carchi8py'],
+ 'scanner_policy': 'primary'
+ }
+
+ def mock_args(self):
+ return {
+ 'state': self.mock_scanner['state'],
+ 'scanner_pool': self.mock_scanner['scanner_pool'],
+ 'vserver': self.mock_scanner['vserver'],
+ 'hostnames': self.mock_scanner['hostnames'],
+ 'privileged_users': self.mock_scanner['privileged_users'],
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'scanner_policy': self.mock_scanner['scanner_policy']
+ }
+
+ def get_scanner_mock_object(self, kind=None):
+ scanner_obj = scanner_module()
+ scanner_obj.asup_log_for_cserver = Mock(return_value=None)
+ if kind is None:
+ scanner_obj.server = MockONTAPConnection()
+ else:
+ scanner_obj.server = MockONTAPConnection(kind='scanner', data=self.mock_scanner)
+ return scanner_obj
+
+ def test_module_fail_when_required_args_missing(self):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ scanner_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+ def test_get_nonexistent_scanner(self):
+ ''' Test if get_scanner_pool returns None for a non-existent scanner pool '''
+ set_module_args(self.mock_args())
+ result = self.get_scanner_mock_object().get_scanner_pool()
+ assert not result
+
+ def test_get_existing_scanner(self):
+ ''' Test that get_scanner_pool returns details for an existing scanner pool '''
+ set_module_args(self.mock_args())
+ result = self.get_scanner_mock_object('scanner').get_scanner_pool()
+ assert result
+
+ def test_successfully_create(self):
+ set_module_args(self.mock_args())
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_scanner_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ def test_create_idempotency(self):
+ set_module_args(self.mock_args())
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_scanner_mock_object('scanner').apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successfully_delete(self):
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_scanner_mock_object('scanner').apply()
+ assert exc.value.args[0]['changed']
+
+ def test_delete_idempotency(self):
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_scanner_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ def test_successfully_modify(self):
+ data = self.mock_args()
+ data['hostnames'] = "host1"
+ set_module_args(data)
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_scanner_mock_object('scanner').apply()
+ assert exc.value.args[0]['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_audit.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_audit.py
new file mode 100644
index 000000000..9a4ec6f91
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_audit.py
@@ -0,0 +1,354 @@
+# (c) 2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_vserver_audit '''
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, call_main, create_module, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vserver_audit \
+ import NetAppONTAPVserverAudit as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+# REST API canned responses when mocking send_request
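+# Keys such as 'is_rest_9_10_1', 'empty_records', 'empty_good' and 'generic_error' are
+# assumed to be supplied by the shared rest_responses factory defaults and merged with
+# the module specific responses defined below.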
+SRR = rest_responses({
+ # module specific responses
+ 'audit_record': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "vserver"
+ },
+ "enabled": True,
+ "events": {
+ "authorization_policy": True,
+ "cap_staging": True,
+ "cifs_logon_logoff": False,
+ "file_operations": False,
+ "file_share": True,
+ "security_group": True,
+ "user_account": True
+ },
+ "log_path": "/",
+ "log": {
+ "format": "xml",
+ "retention": {"count": 4},
+ "rotation": {"size": 1048576}
+ },
+ "guarantee": False
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ 'audit_record_modified': (
+ 200,
+ {
+ "records": [
+ {
+ "svm": {
+ "uuid": "671aa46e-11ad-11ec-a267-005056b30cfa",
+ "name": "vserver"
+ },
+ "enabled": False,
+ "events": {
+ "authorization_policy": True,
+ "cap_staging": True,
+ "cifs_logon_logoff": False,
+ "file_operations": False,
+ "file_share": True,
+ "security_group": True,
+ "user_account": True
+ },
+ "log_path": "/",
+ "log": {
+ "format": "xml",
+ "retention": {"count": 4},
+ "rotation": {"size": 1048576}
+ },
+ "guarantee": False
+ }
+ ],
+ "num_records": 1
+ }, None
+ ),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
+
+ARGS_REST = {
+ 'hostname': 'test',
+ 'username': 'test_user',
+ 'password': 'test_pass!',
+ 'use_rest': 'always',
+ 'vserver': 'vserver',
+}
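+# Baseline connection arguments for the REST tests; each test merges its own module_args
+# on top of ARGS_REST through call_main/create_module.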
+
+
+def test_get_nonexistent_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['empty_records']),
+ ])
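+ # register_responses lists the expected (method, endpoint, response) calls in order;
+ # the patched transport serves them sequentially and the framework flags any unexpected
+ # or missing call (behaviour assumed from the mock_rest_and_zapi_requests helper).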
+ audit_obj = create_module(my_module, ARGS_REST)
+ result = audit_obj.get_vserver_audit_configuration_rest()
+ assert result is None
+
+
+def test_get_existent_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['audit_record']),
+ ])
+ audit_obj = create_module(my_module, ARGS_REST)
+ result = audit_obj.get_vserver_audit_configuration_rest()
+ assert result
+
+
+def test_error_get_existent_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['generic_error']),
+ ])
+ error = call_main(my_main, ARGS_REST, fail=True)['msg']
+ msg = "Error on fetching vserver audit configuration"
+ assert msg in error
+
+
+def test_create_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['empty_records']),
+ ('POST', 'protocols/audit', SRR['empty_good']),
+ ])
+ module_args = {
+ "enabled": False,
+ "events": {
+ "authorization_policy": False,
+ "cap_staging": False,
+ "cifs_logon_logoff": True,
+ "file_operations": True,
+ "file_share": False,
+ "security_group": False,
+ "user_account": False
+ },
+ "log_path": "/",
+ "log": {
+ "format": "xml",
+ "retention": {"count": 4},
+ "rotation": {"size": 1048576}
+ }
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_create_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['empty_records']),
+ ('POST', 'protocols/audit', SRR['generic_error']),
+ ])
+ module_args = {
+ "enabled": False,
+ "events": {
+ "authorization_policy": False,
+ "cap_staging": False,
+ "cifs_logon_logoff": True,
+ "file_operations": True,
+ "file_share": False,
+ "security_group": False,
+ "user_account": False
+ },
+ "log_path": "/",
+ "log": {
+ "format": "xml",
+ "retention": {"count": 4},
+ "rotation": {"size": 1048576}
+ },
+ "guarantee": False
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on creating vserver audit configuration"
+ assert msg in error
+
+
+def test_modify_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['audit_record']),
+ ('PATCH', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ "enabled": True,
+ "events": {
+ "authorization_policy": True,
+ "cap_staging": True,
+ "cifs_logon_logoff": False,
+ "file_operations": False,
+ "file_share": True,
+ "security_group": True,
+ "user_account": True
+ },
+ "log_path": "/tmp",
+ "log": {
+ "format": "evtx",
+ "retention": {"count": 5},
+ "rotation": {"size": 10485760}
+ }
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_enable_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['audit_record']),
+ ('PATCH', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ "enabled": False
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_modify_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['audit_record']),
+ ('PATCH', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error']),
+ ])
+ module_args = {
+ "enabled": True,
+ "events": {
+ "authorization_policy": True,
+ "cap_staging": True,
+ "cifs_logon_logoff": False,
+ "file_operations": False,
+ "file_share": True,
+ "security_group": True,
+ "user_account": True
+ },
+ "log_path": "/tmp",
+ "log": {
+ "format": "evtx",
+ "retention": {"count": 5},
+ "rotation": {"size": 10485760}
+ }
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on modifying vserver audit configuration"
+ assert msg in error
+
+
+def test_error_enabling_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['audit_record']),
+ ('PATCH', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ('PATCH', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ "enabled": False,
+ "events": {
+ "authorization_policy": False,
+ "cap_staging": False,
+ "cifs_logon_logoff": False,
+ "file_operations": False,
+ "file_share": True,
+ "security_group": True,
+ "user_account": True
+ },
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_error_disabling_events_audit_config_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ])
+ module_args = {
+ "events": {
+ "authorization_policy": False,
+ "cap_staging": False,
+ "cifs_logon_logoff": False,
+ "file_operations": False,
+ "file_share": False,
+ "security_group": False,
+ "user_account": False
+ },
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "At least one event should be enabled"
+ assert msg in error
+
+
+@patch('time.sleep')
+def test_delete_audit_config_rest(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['audit_record']),
+ ('PATCH', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ('GET', 'protocols/audit', SRR['audit_record_modified']),
+ ('DELETE', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ])
+ module_args = {
+ "state": "absent"
+ }
+ assert call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+@patch('time.sleep')
+def test_error_delete_audit_config_rest(sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['audit_record']),
+ ('PATCH', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['empty_good']),
+ ('GET', 'protocols/audit', SRR['audit_record_modified']),
+ ('DELETE', 'protocols/audit/671aa46e-11ad-11ec-a267-005056b30cfa', SRR['generic_error']),
+ ])
+ module_args = {
+ "state": "absent"
+ }
+ error = call_main(my_main, ARGS_REST, module_args, fail=True)['msg']
+ msg = "Error on deleting vserver audit configuration"
+ assert msg in error
+
+
+def test_create_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['audit_record']),
+ ])
+ module_args = {
+ 'state': 'present'
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
+
+
+def test_delete_idempotent_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_10_1']),
+ ('GET', 'protocols/audit', SRR['empty_records'])
+ ])
+ module_args = {
+ 'state': 'absent'
+ }
+ assert not call_main(my_main, ARGS_REST, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_cifs_security.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_cifs_security.py
new file mode 100644
index 000000000..6cb823d40
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_cifs_security.py
@@ -0,0 +1,111 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, AnsibleFailJson
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vserver_cifs_security \
+ import NetAppONTAPCifsSecurity as cifs_security_module # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+cifs_security_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'cifs-security': {
+ 'is_aes_encryption_enabled': False,
+ 'lm_compatibility_level': 'krb',
+ 'kerberos_clock_skew': 20
+ }
+ }
+}
+
+ZRR = zapi_responses({
+ 'cifs_security_info': build_zapi_response(cifs_security_info)
+})
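+# build_zapi_response wraps the dict above into a canned NaElement reply; zapi_responses is
+# assumed to also provide common entries such as 'success' and 'error' used in the tests below.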
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'vserver',
+ 'use_rest': 'never',
+ 'is_aes_encryption_enabled': False,
+ 'lm_compatibility_level': 'krb',
+ 'kerberos_clock_skew': 20
+}
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ cifs_security_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_get():
+ register_responses([
+ ('cifs-security-get-iter', ZRR['cifs_security_info'])
+ ])
+ cifs_obj = create_module(cifs_security_module, DEFAULT_ARGS)
+ result = cifs_obj.cifs_security_get_iter()
+ assert result
+
+
+def test_modify_int_option():
+ register_responses([
+ ('cifs-security-get-iter', ZRR['cifs_security_info']),
+ ('cifs-security-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'kerberos_clock_skew': 15
+ }
+ assert create_and_apply(cifs_security_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_modify_bool_option():
+ register_responses([
+ ('cifs-security-get-iter', ZRR['cifs_security_info']),
+ ('cifs-security-modify', ZRR['success']),
+ ])
+ module_args = {
+ 'is_aes_encryption_enabled': True
+ }
+ assert create_and_apply(cifs_security_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_bool_option():
+ register_responses([
+ ('cifs-security-get-iter', ZRR['cifs_security_info']),
+ ('cifs-security-modify', ZRR['error']),
+ ])
+ module_args = {
+ 'is_aes_encryption_enabled': True
+ }
+ error = create_and_apply(cifs_security_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+ assert 'Error modifying cifs security' in error
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('cifs-security-modify', ZRR['error'])
+ ])
+ module_args = {'use_rest': 'never', 'is_aes_encryption_enabled': True}
+ current = {}
+ my_obj = create_module(cifs_security_module, DEFAULT_ARGS, module_args)
+
+ error = expect_and_capture_ansible_exception(my_obj.cifs_security_modify, 'fail', current)['msg']
+ assert 'Error modifying cifs security on vserver: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_peer.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_peer.py
new file mode 100644
index 000000000..2af8a151f
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_peer.py
@@ -0,0 +1,440 @@
+# (c) 2018-2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import sys
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import call_main, set_module_args,\
+ AnsibleFailJson, patch_ansible, create_module, create_and_apply, expect_and_capture_ansible_exception, assert_warning_was_raised, print_warnings
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke,\
+ register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vserver_peer \
+ import NetAppONTAPVserverPeer as vserver_peer, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+DEFAULT_ARGS = {
+ 'vserver': 'test',
+ 'peer_vserver': 'test_peer',
+ 'peer_cluster': 'test_cluster_peer',
+ 'local_name_for_peer': 'peer_name',
+ 'local_name_for_source': 'source_name',
+ 'applications': ['snapmirror'],
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'feature_flags': {'no_cserver_ems': True},
+ 'use_rest': 'never'
+}
+
+vserver_peer_info = {
+ 'num-records': 1,
+ 'attributes-list': {
+ 'vserver-peer-info': {
+ 'remote-vserver-name': 'test_peer',
+ 'vserver': 'test',
+ 'peer-vserver': 'test_peer',
+ 'peer-state': 'peered'
+ }
+ }
+}
+
+cluster_info = {
+ 'attributes': {
+ 'cluster-identity-info': {'cluster-name': 'test_cluster_peer'}
+ }
+}
+
+ZRR = zapi_responses({
+ 'vserver_peer_info': build_zapi_response(vserver_peer_info),
+ 'cluster_info': build_zapi_response(cluster_info)
+})
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_obj = vserver_peer()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_successful_create():
+ ''' Test successful create '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['empty']),
+ ('vserver-peer-create', ZRR['success']),
+ ('vserver-peer-get-iter', ZRR['vserver_peer_info']),
+ ('vserver-peer-accept', ZRR['success'])
+ ])
+ args = {'dest_hostname': 'test_destination'}
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS, args)['changed']
+
+
+def test_successful_create_new_style():
+ ''' Test successful create '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['empty']),
+ ('vserver-peer-create', ZRR['success']),
+ ('vserver-peer-get-iter', ZRR['vserver_peer_info']),
+ ('vserver-peer-accept', ZRR['success'])
+ ])
+ default_args = DEFAULT_ARGS.copy()
+ # test without local name
+ del default_args['local_name_for_peer']
+ del default_args['local_name_for_source']
+ args = {'peer_options': {'hostname': 'test_destination'}}
+ assert create_and_apply(vserver_peer, default_args, args)['changed']
+
+
+def test_create_idempotency():
+ ''' Test create idempotency '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['vserver_peer_info'])
+ ])
+ args = {'peer_options': {'hostname': 'test_destination'}}
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_successful_delete():
+ ''' Test successful delete peer '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['vserver_peer_info']),
+ ('vserver-peer-delete', ZRR['success'])
+ ])
+ args = {
+ 'peer_options': {'hostname': 'test_destination'},
+ 'state': 'absent'
+ }
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS, args)['changed']
+
+
+def test_delete_idempotency():
+ ''' Test delete idempotency '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['empty'])
+ ])
+ args = {'dest_hostname': 'test_destination', 'state': 'absent'}
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS, args)['changed'] is False
+
+
+def test_helper_vserver_peer_get_iter():
+ ''' Test vserver_peer_get_iter method '''
+ args = {'dest_hostname': 'test_destination'}
+ obj = create_module(vserver_peer, DEFAULT_ARGS, args)
+ result = obj.vserver_peer_get_iter('source')
+ print(result.to_string(pretty=True))
+ assert result['query'] is not None
+ assert result['query']['vserver-peer-info'] is not None
+ info = result['query']['vserver-peer-info']
+ assert info['vserver'] == DEFAULT_ARGS['vserver']
+ assert info['remote-vserver-name'] == DEFAULT_ARGS['peer_vserver']
+
+
+def test_dest_hostname_absent():
+ my_obj = create_module(vserver_peer, DEFAULT_ARGS)
+ assert my_obj.parameters['hostname'] == my_obj.parameters['dest_hostname']
+
+
+def test_get_packet():
+ ''' Test vserver_peer_get method '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['vserver_peer_info'])
+ ])
+ args = {'dest_hostname': 'test_destination'}
+ obj = create_module(vserver_peer, DEFAULT_ARGS, args)
+ result = obj.vserver_peer_get()
+ assert 'vserver' in result.keys()
+ assert 'peer_vserver' in result.keys()
+ assert 'peer_state' in result.keys()
+
+
+def test_error_on_missing_params_create():
+ ''' Test error thrown from vserver_peer_create '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['empty'])
+ ])
+ default_args = DEFAULT_ARGS.copy()
+ del default_args['applications']
+ args = {'dest_hostname': 'test_destination'}
+ msg = create_and_apply(vserver_peer, default_args, args, fail=True)['msg']
+ assert 'applications parameter is missing' in msg
+
+
+def test_get_peer_cluster_called():
+ ''' Test get_peer_cluster_name called if peer_cluster is missing '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['empty']),
+ ('cluster-identity-get', ZRR['cluster_info']),
+ ('vserver-peer-create', ZRR['success']),
+ ('vserver-peer-get-iter', ZRR['vserver_peer_info']),
+ ('vserver-peer-accept', ZRR['success'])
+ ])
+ default_args = DEFAULT_ARGS.copy()
+ del default_args['peer_cluster']
+ args = {'dest_hostname': 'test_destination'}
+ assert create_and_apply(vserver_peer, default_args, args)['changed']
+
+
+def test_get_peer_cluster_packet():
+ ''' Test get_peer_cluster_name xml packet '''
+ register_responses([
+ ('cluster-identity-get', ZRR['cluster_info'])
+ ])
+ args = {'dest_hostname': 'test_destination'}
+ obj = create_module(vserver_peer, DEFAULT_ARGS, args)
+ result = obj.get_peer_cluster_name()
+ assert result == DEFAULT_ARGS['peer_cluster']
+
+
+def test_error_on_first_ZAPI_call():
+ ''' Test error thrown from vserver_peer_get '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['error'])
+ ])
+ args = {'dest_hostname': 'test_destination'}
+ msg = create_and_apply(vserver_peer, DEFAULT_ARGS, args, fail=True)['msg']
+ assert 'Error fetching vserver peer' in msg
+
+
+def test_error_create_new_style():
+ ''' Test error in create - peer not visible '''
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['empty']),
+ ('vserver-peer-create', ZRR['success']),
+ ('vserver-peer-get-iter', ZRR['empty'])
+ ])
+ args = {'peer_options': {'hostname': 'test_destination'}}
+ msg = create_and_apply(vserver_peer, DEFAULT_ARGS, args, fail=True)['msg']
+ assert 'Error retrieving vserver peer information while accepting' in msg
+
+
+def test_if_all_methods_catch_exception():
+ register_responses([
+ ('vserver-peer-delete', ZRR['error']),
+ ('cluster-identity-get', ZRR['error']),
+ ('vserver-peer-create', ZRR['error'])
+ ])
+ args = {'dest_hostname': 'test_destination'}
+ my_obj = create_module(vserver_peer, DEFAULT_ARGS, args)
+
+ error = expect_and_capture_ansible_exception(my_obj.vserver_peer_delete, 'fail', current={'local_peer_vserver': 'test_peer'})['msg']
+ assert 'Error deleting vserver peer test: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.get_peer_cluster_name, 'fail')['msg']
+ assert 'Error fetching peer cluster name for peer vserver test_peer: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+ error = expect_and_capture_ansible_exception(my_obj.vserver_peer_create, 'fail')['msg']
+ assert 'Error creating vserver peer test: NetApp API failed. Reason - 12345:synthetic error for UT purpose' in error
+
+
+def test_error_in_vserver_accept():
+ register_responses([
+ ('vserver-peer-get-iter', ZRR['empty']),
+ ('vserver-peer-create', ZRR['success']),
+ ('vserver-peer-get-iter', ZRR['vserver_peer_info']),
+ ('vserver-peer-accept', ZRR['error'])
+ ])
+ args = {'dest_hostname': 'test_destination'}
+ msg = create_and_apply(vserver_peer, DEFAULT_ARGS, args, fail=True)['msg']
+ assert 'Error accepting vserver peer test_peer: NetApp API failed. Reason - 12345:synthetic error for UT purpose' == msg
+
+
+DEFAULT_ARGS_REST = {
+ "hostname": "10.193.177.97",
+ "username": "admin",
+ "password": "netapp123",
+ "https": "yes",
+ "validate_certs": "no",
+ "use_rest": "always",
+ "state": "present",
+ "dest_hostname": "0.0.0.0",
+ "vserver": "svmsrc3",
+ "peer_vserver": "svmdst3",
+ "applications": ['snapmirror']
+}
+
+
+SRR = rest_responses({
+ 'vserver_peer_info': (200, {
+ "records": [{
+ "vserver": "svmsrc1",
+ "peer_vserver": "svmdst1",
+ "name": "svmdst1",
+ "state": "peered",
+ "local_peer_vserver_uuid": "545d2562-2fca-11ec-8016-005056b3f5d5"
+ }],
+ 'num_records': 1
+ }, None),
+ 'cluster_info': (200, {"name": "mohanontap98cluster"}, None),
+ 'job_info': (200, {
+ "job": {
+ "uuid": "d78811c1-aebc-11ec-b4de-005056b30cfa",
+ "_links": {"self": {"href": "/api/cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa"}}
+ }}, None),
+ 'job_not_found': (404, "", {"message": "entry doesn't exist", "code": "4", "target": "uuid"})
+})
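+# 'job_info' simulates an asynchronous job reply and 'job_not_found' a 404 on the job endpoint;
+# together they drive the job polling retry path exercised at the end of this file.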
+
+
+def test_ensure_get_server_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['vserver_peer_info'])
+ ])
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS_REST)['changed'] is False
+
+
+def test_ensure_create_server_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['empty_records']),
+ ('POST', 'svm/peers', SRR['success']),
+ ('GET', 'svm/peers', SRR['vserver_peer_info']),
+ ('PATCH', 'svm/peers', SRR['success'])
+ ])
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS_REST, {'peer_cluster': 'peer_cluster'})['changed']
+
+
+def test_ensure_delete_server_called():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['vserver_peer_info']),
+ ('DELETE', 'svm/peers', SRR['success'])
+ ])
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS_REST, {'state': 'absent'})['changed']
+
+
+def test_create_vserver_peer_without_cluster_name_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['empty_records']),
+ ('GET', 'cluster', SRR['cluster_info']),
+ ('POST', 'svm/peers', SRR['success']),
+ ('GET', 'svm/peers', SRR['vserver_peer_info']),
+ ('PATCH', 'svm/peers', SRR['success'])
+ ])
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS_REST)['changed']
+
+
+def test_create_vserver_peer_with_local_name_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['empty_records']),
+ ('GET', 'cluster', SRR['cluster_info']),
+ ('POST', 'svm/peers', SRR['success']),
+ ('GET', 'svm/peers', SRR['vserver_peer_info']),
+ ('PATCH', 'svm/peers', SRR['success'])
+ ])
+ args = {
+ 'local_name_for_peer': 'peer',
+ 'local_name_for_source': 'source'
+ }
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS_REST, args)['changed']
+
+
+def test_error_in_vserver_accept_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['empty_records']),
+ ('GET', 'cluster', SRR['cluster_info']),
+ ('POST', 'svm/peers', SRR['success']),
+ ('GET', 'svm/peers', SRR['vserver_peer_info']),
+ ('PATCH', 'svm/peers', SRR['generic_error'])
+ ])
+ msg = create_and_apply(vserver_peer, DEFAULT_ARGS_REST, fail=True)['msg']
+ assert 'Error accepting vserver peer relationship on svmdst3: calling: svm/peers: got Expected error.' == msg
+
+
+def test_error_in_vserver_get_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['generic_error'])
+ ])
+ msg = create_and_apply(vserver_peer, DEFAULT_ARGS_REST, fail=True)['msg']
+ assert 'Error fetching vserver peer svmsrc3: calling: svm/peers: got Expected error.' == msg
+
+
+def test_error_in_vserver_delete_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['vserver_peer_info']),
+ ('DELETE', 'svm/peers', SRR['generic_error'])
+ ])
+ msg = create_and_apply(vserver_peer, DEFAULT_ARGS_REST, {'state': 'absent'}, fail=True)['msg']
+ assert 'Error deleting vserver peer relationship on svmsrc3: calling: svm/peers: got Expected error.' == msg
+
+
+def test_error_in_peer_cluster_get_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['empty_records']),
+ ('GET', 'cluster', SRR['generic_error'])
+ ])
+ msg = create_and_apply(vserver_peer, DEFAULT_ARGS_REST, fail=True)['msg']
+ assert 'Error fetching peer cluster name for peer vserver svmdst3: calling: cluster: got Expected error.' == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_missing_netapp_lib(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ msg = 'Error: the python NetApp-Lib module is required. Import error: None'
+ assert msg == create_module(vserver_peer, DEFAULT_ARGS, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_job_error_in_vserver_delete_rest(dont_sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['vserver_peer_info']),
+ ('DELETE', 'svm/peers', SRR['job_info']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found'])
+ ])
+ assert create_and_apply(vserver_peer, DEFAULT_ARGS_REST, {'state': 'absent'})['changed']
+ print_warnings()
+ assert_warning_was_raised('Ignoring job status, assuming success - Issue #45.')
+
+
+@patch('time.sleep')
+def test_job_error_in_vserver_create_rest(dont_sleep):
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'cluster', SRR['is_rest_9_9_0']),
+ ('GET', 'svm/peers', SRR['empty_records']),
+ ('GET', 'cluster', SRR['empty_records']),
+ ('POST', 'svm/peers', SRR['job_info']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'cluster/jobs/d78811c1-aebc-11ec-b4de-005056b30cfa', SRR['job_not_found']),
+ ('GET', 'svm/peers', SRR['empty_records']),
+ ])
+ assert call_main(my_main, DEFAULT_ARGS_REST, fail=True)['msg'] == 'Error reading vserver peer information on peer svmdst3'
+ print_warnings()
+ assert_warning_was_raised('Ignoring job status, assuming success - Issue #45.')
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_peer_permissions.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_peer_permissions.py
new file mode 100644
index 000000000..b9d5af9b4
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_vserver_peer_permissions.py
@@ -0,0 +1,226 @@
+# (c) 2023, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+from __future__ import (absolute_import, division, print_function)
+
+__metaclass__ = type
+
+import pytest
+import sys
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args, \
+ patch_ansible, create_and_apply, create_module, expect_and_capture_ansible_exception, call_main
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import patch_request_and_invoke, register_responses, get_mock_record
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_vserver_peer_permissions \
+ import NetAppONTAPVserverPeerPermissions as my_module, main as my_main # module under test
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+SRR = rest_responses({
+ 'peer_record': (200, {
+ "records": [
+ {
+ "svm": {"name": "ansibleSVM", "uuid": "e3cb5c7fcd20"},
+ "cluster_peer": {"name": "test912-2", "uuid": "1e3cb5c7fcd20"},
+ "applications": ['snapmirror', 'flexcache'],
+ }],
+ "num_records": 1
+ }, None),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+})
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ 'applications': ['snapmirror'],
+}
+
+
+def test_error_validate_vserver_name_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ module_args = {
+ 'vserver': '*',
+ 'cluster_peer': 'test912-2'
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'As svm name * represents all svms and created by default, please provide a specific SVM name'
+ assert msg in error
+
+
+def test_error_validate_vserver_apps_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96'])
+ ])
+ module_args = {
+ 'state': 'present',
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ 'applications': ['']
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Applications field cannot be empty, at least one application must be specified'
+ assert msg in error
+
+
+def test_get_vserver_peer_permission_rest_none():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['empty_records'])
+ ])
+ module_args = {
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ result = my_obj.get_vserver_peer_permission_rest()
+ assert result is None
+
+
+def test_get_vserver_peer_permission_rest_error():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['generic_error'])
+ ])
+ module_args = {
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ }
+ my_module_object = create_module(my_module, DEFAULT_ARGS, module_args)
+ msg = 'Error on fetching vserver peer permissions'
+ assert msg in expect_and_capture_ansible_exception(my_module_object.get_vserver_peer_permission_rest, 'fail')['msg']
+
+
+def test_create_vserver_peer_permission_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['empty_records']),
+ ('POST', 'svm/peer-permissions', SRR['empty_good'])
+ ])
+ module_args = {
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ 'applications': ['snapmirror']
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_vserver_peer_permission_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['empty_records']),
+ ('POST', 'svm/peer-permissions', SRR['generic_error'])
+ ])
+ module_args = {
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ 'applications': ['snapmirror']
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error on creating vserver peer permissions'
+ assert msg in error
+
+
+def test_modify_vserver_peer_permission_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['peer_record']),
+ ('PATCH', 'svm/peer-permissions/1e3cb5c7fcd20/e3cb5c7fcd20', SRR['empty_good'])
+ ])
+ module_args = {
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ 'applications': ['snapmirror']
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_modify_vserver_peer_permission_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['peer_record']),
+ ('PATCH', 'svm/peer-permissions/1e3cb5c7fcd20/e3cb5c7fcd20', SRR['generic_error'])
+ ])
+ module_args = {
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ 'applications': ['snapmirror']
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error on modifying vserver peer permissions'
+ assert msg in error
+
+
+def test_delete_vserver_peer_permission_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['peer_record']),
+ ('DELETE', 'svm/peer-permissions/1e3cb5c7fcd20/e3cb5c7fcd20', SRR['empty_good'])
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_error_delete_vserver_peer_permission_rest():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['peer_record']),
+ ('DELETE', 'svm/peer-permissions/1e3cb5c7fcd20/e3cb5c7fcd20', SRR['generic_error'])
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2'
+ }
+ error = call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ print('Info: %s' % error)
+ msg = 'Error on deleting vserver peer permissions'
+ assert msg in error
+
+
+def test_successfully_vserver_peer_permission_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['peer_record']),
+ ])
+ module_args = {
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ 'applications': ['snapmirror', 'flexcache']
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_successfully_delete_vserver_peer_permission_rest_idempotency():
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_96']),
+ ('GET', 'svm/peer-permissions', SRR['empty_records']),
+ ])
+ module_args = {
+ 'state': 'absent',
+ 'vserver': 'ansibleSVM',
+ 'cluster_peer': 'test912-2',
+ 'applications': ['snapmirror', 'flexcache']
+ }
+ assert not call_main(my_main, DEFAULT_ARGS, module_args)['changed']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_wait_for_condition.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_wait_for_condition.py
new file mode 100644
index 000000000..b851a9842
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_wait_for_condition.py
@@ -0,0 +1,485 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test template for ONTAP Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ call_main, create_module, expect_and_capture_ansible_exception, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.rest_factory import rest_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_wait_for_condition \
+ import NetAppONTAPWFC as my_module, main as my_main
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def sp_image_update_progress_info(in_progress=True):
+ return {
+ 'attributes': {
+ 'service-processor-image-update-progress-info': {
+ 'is-in-progress': 'true' if in_progress else 'false',
+ }
+ }
+ }
+
+
+def sp_info(version):
+ return {
+ 'attributes': {
+ 'service-processor-info': {
+ 'firmware-version': version,
+ }
+ }
+ }
+
+
+ZRR = zapi_responses({
+ 'sp_info_3_09': build_zapi_response(sp_info('3.09'), 1),
+ 'sp_info_3_10': build_zapi_response(sp_info('3.10'), 1),
+ 'sp_image_update_progress_info_in_progress': build_zapi_response(sp_image_update_progress_info(True), 1),
+ 'sp_image_update_progress_info_idle': build_zapi_response(sp_image_update_progress_info(False), 1),
+})
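+# The second argument to build_zapi_response is assumed to be the num-records value attached
+# to the canned reply, so each response reports exactly one matching record.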
+
+
+SRR = rest_responses({
+ 'one_record_home_node': (200, {'records': [
+ {'name': 'node2_abc_if',
+ 'uuid': '54321',
+ 'enabled': True,
+ 'location': {'home_port': {'name': 'e0c'}, 'home_node': {'name': 'node2'}, 'node': {'name': 'node2'}, 'port': {'name': 'e0c'}}
+ }]}, None),
+ 'one_record_vserver': (200, {'records': [{
+ 'name': 'abc_if',
+ 'uuid': '54321',
+ 'svm': {'name': 'vserver', 'uuid': 'svm_uuid'},
+ 'data_protocol': ['nfs'],
+ 'enabled': True,
+ 'ip': {'address': '10.11.12.13', 'netmask': '255.192.0.0'},
+ 'location': {
+ 'home_port': {'name': 'e0c'},
+ 'home_node': {'name': 'node2'},
+ 'node': {'name': 'node2'},
+ 'port': {'name': 'e0c'},
+ 'auto_revert': True,
+ 'failover': True
+ },
+ 'service_policy': {'name': 'data-mgmt'}
+ }]}, None),
+ 'two_records': (200, {'records': [{'name': 'node2_abc_if'}, {'name': 'node2_abc_if'}]}, None),
+ 'error_precluster': (500, None, {'message': 'are available in precluster.'}),
+ 'cluster_identity': (200, {'location': 'Oz', 'name': 'abc'}, None),
+ 'node_309_online': (200, {'records': [
+ {'service_processor': {'firmware_version': '3.09', 'state': 'online'}}
+ ]}, None),
+ 'node_309_updating': (200, {'records': [
+ {'service_processor': {'firmware_version': '3.09', 'state': 'updating'}}
+ ]}, None),
+ 'node_310_online': (200, {'records': [
+ {'service_processor': {'firmware_version': '3.10', 'state': 'online'}}
+ ]}, None),
+ 'snapmirror_relationship': (200, {'records': [
+ {'state': 'snapmirrored'}
+ ]}, None),
+}, False)
+
+DEFAULT_ARGS = {
+ 'hostname': '10.10.10.10',
+ 'username': 'admin',
+ 'password': 'password',
+ 'attributes': {
+ 'node': 'node1',
+ 'expected_version': '3.10'
+ }
+}
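+# 'attributes' carries the per-resource inputs: for the sp_version checks below it names the
+# node to poll and the firmware version the module waits for.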
+
+
+def test_module_fail_when_required_args_missing():
+ ''' required arguments are reported as errors '''
+ module_args = {
+ 'use_rest': 'never'
+ }
+ error = create_module(my_module, module_args, fail=True)['msg']
+ assert 'missing required arguments:' in error
+ assert 'name' in error
+ assert 'conditions' in error
+
+
+@patch('time.sleep')
+def test_rest_successful_wait_for_sp_upgrade(dont_sleep):
+ ''' Test successful sp_upgrade check '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_updating']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'name': 'sp_upgrade',
+ 'conditions': 'is_in_progress',
+ }
+ results = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert results['msg'] == 'matched condition: is_in_progress'
+ assert results['states'] == 'online*2,updating'
+ assert results['last_state'] == 'updating'
+
+
+@patch('time.sleep')
+def test_rest_successful_wait_for_snapmirror_relationship(dont_sleep):
+ ''' Test successful snapmirror_relationship check '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'snapmirror/relationships', SRR['snapmirror_relationship']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'name': 'snapmirror_relationship',
+ 'conditions': 'transfer_state',
+ 'attributes': {
+ 'destination_path': 'path',
+ 'expected_transfer_state': 'idle'
+ }
+ }
+ results = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert results['msg'] == 'matched condition: transfer_state'
+ # these are generated from dictionary keys, and ordering is not guaranteed with Python 3.5
+ assert results['states'] in ['snapmirrored,idle', 'idle']
+ assert results['last_state'] == 'idle'
+
+
+@patch('time.sleep')
+def test_rest_successful_wait_for_sp_version(dont_sleep):
+ ''' Test successful sp_version check '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_updating']),
+ ('GET', 'cluster/nodes', SRR['generic_error']),
+ ('GET', 'cluster/nodes', SRR['zero_records']),
+ ('GET', 'cluster/nodes', SRR['node_309_updating']),
+ ('GET', 'cluster/nodes', SRR['node_310_online']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ }
+ results = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert results['msg'] == 'matched condition: firmware_version'
+ assert results['states'] == '3.09*4,3.10'
+ assert results['last_state'] == '3.10'
+
+
+@patch('time.sleep')
+def test_rest_successful_wait_for_sp_version_not_matched(dont_sleep):
+ ''' Test successful sp_version check '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_updating']),
+ ('GET', 'cluster/nodes', SRR['generic_error']),
+ ('GET', 'cluster/nodes', SRR['zero_records']),
+ ('GET', 'cluster/nodes', SRR['node_309_updating']),
+ ('GET', 'cluster/nodes', SRR['node_310_online']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'name': 'sp_version',
+ 'conditions': ['firmware_version'],
+ 'state': 'absent',
+ 'attributes': {
+ 'node': 'node1',
+ 'expected_version': '3.09'
+ }
+ }
+ results = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert results['msg'] == 'conditions not matched'
+ assert results['states'] == '3.09*4,3.10'
+ assert results['last_state'] == '3.10'
+
+
+@patch('time.sleep')
+def test_rest_negative_wait_for_sp_version_error(dont_sleep):
+ ''' Test negative sp_version check '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster/nodes', SRR['zero_records']),
+ ('GET', 'cluster/nodes', SRR['zero_records']),
+ ('GET', 'cluster/nodes', SRR['zero_records']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ }
+ error = 'Error: no record for node:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_rest_negative_wait_for_sp_version_timeout(dont_sleep):
+ ''' Test negative sp_version check '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ('GET', 'cluster/nodes', SRR['node_309_online']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ 'timeout': 40,
+ 'polling_interval': 12,
+ }
+ error = 'Error: timeout waiting for condition: firmware_version==3.10.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_zapi_successful_wait_for_sp_upgrade(dont_sleep):
+ ''' Test successful sp_upgrade check '''
+ register_responses([
+ ('ZAPI', 'service-processor-image-update-progress-get', ZRR['sp_image_update_progress_info_idle']),
+ ('ZAPI', 'service-processor-image-update-progress-get', ZRR['sp_image_update_progress_info_idle']),
+ ('ZAPI', 'service-processor-image-update-progress-get', ZRR['sp_image_update_progress_info_in_progress']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_upgrade',
+ 'conditions': 'is_in_progress',
+ }
+ results = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert results['msg'] == 'matched condition: is_in_progress'
+ assert results['states'] == 'false*2,true'
+ assert results['last_state'] == 'true'
+
+
+@patch('time.sleep')
+def test_zapi_successful_wait_for_sp_version(dont_sleep):
+ ''' Test successful sp_version check '''
+ register_responses([
+ ('ZAPI', 'service-processor-get', ZRR['sp_info_3_09']),
+ ('ZAPI', 'service-processor-get', ZRR['error']),
+ ('ZAPI', 'service-processor-get', ZRR['sp_info_3_09']),
+ ('ZAPI', 'service-processor-get', ZRR['sp_info_3_10']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ }
+ results = call_main(my_main, DEFAULT_ARGS, module_args)
+ assert results['msg'] == 'matched condition: firmware_version'
+ assert results['states'] == '3.09*2,3.10'
+ assert results['last_state'] == '3.10'
+
+
+def test_zapi_negative_wait_for_snapmirror_relationship_error():
+ ''' Test negative snapmirror_relationship check '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'snapmirror_relationship',
+ 'conditions': 'state',
+ 'attributes': {
+ 'destination_path': 'path',
+ 'expected_state': 'snapmirrored'
+ }
+ }
+ error = 'Error: event snapmirror_relationship is not supported with ZAPI. It requires REST.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_zapi_negative_wait_for_sp_version_error(dont_sleep):
+ ''' Test negative sp_version check '''
+ register_responses([
+ ('ZAPI', 'service-processor-get', ZRR['no_records']),
+ ('ZAPI', 'service-processor-get', ZRR['no_records']),
+ ('ZAPI', 'service-processor-get', ZRR['no_records']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ }
+ error = 'Error: Cannot find element with name: firmware-version in results:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+@patch('time.sleep')
+def test_zapi_negative_wait_for_sp_version_timeout(dont_sleep):
+ ''' Test negative sp_version check '''
+ register_responses([
+ ('ZAPI', 'service-processor-get', ZRR['sp_info_3_09']),
+ ('ZAPI', 'service-processor-get', ZRR['error']),
+ ('ZAPI', 'service-processor-get', ZRR['sp_info_3_09']),
+ ('ZAPI', 'service-processor-get', ZRR['sp_info_3_09']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ 'timeout': 30,
+ 'polling_interval': 9,
+ }
+ error = 'Error: timeout waiting for condition: firmware_version==3.10.'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_invalid_name():
+ ''' Test that name is valid '''
+ register_responses([
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'some_name',
+ 'conditions': 'firmware_version',
+ }
+ error = 'value of name must be one of:'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['use_rest'] = 'always'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_validate_resource():
+ ''' KeyError on unexpected name '''
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'some_name' in expect_and_capture_ansible_exception(my_obj.validate_resource, KeyError, 'some_name')
+ module_args['use_rest'] = 'always'
+ assert 'some_name' in expect_and_capture_ansible_exception(my_obj.validate_resource, KeyError, 'some_name')
+
+
+def test_negative_build_zapi():
+ ''' KeyError on unexpected name '''
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'some_name' in expect_and_capture_ansible_exception(my_obj.build_zapi, KeyError, 'some_name')
+
+
+def test_negative_build_rest_api_kwargs():
+ ''' KeyError on unexpected name '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ assert 'some_name' in expect_and_capture_ansible_exception(my_obj.build_rest_api_kwargs, KeyError, 'some_name')
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_wait_for_condition.NetAppONTAPWFC.get_record_rest')
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_wait_for_condition.NetAppONTAPWFC.extract_condition')
+def test_get_condition_other(mock_extract_condition, mock_get_record_rest):
+ ''' condition not found, non expected condition ignored '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'always',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ 'state': 'absent'
+ }
+ my_obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ condition = 'other_condition'
+ mock_get_record_rest.return_value = None, None
+ mock_extract_condition.side_effect = [
+ (None, None),
+ (condition, None),
+ ]
+ assert my_obj.get_condition('name', 'dummy') == ('conditions not matched', None)
+ assert my_obj.get_condition('name', 'dummy') == ('conditions not matched: found other condition: %s' % condition, None)
+
+
+def test_invalid_condition():
+ ''' Test that condition is valid '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_upgrade',
+ 'conditions': [
+ 'firmware_version',
+ 'some_condition'
+ ]
+ }
+ error = 'firmware_version is not valid for resource name: sp_upgrade'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+ module_args['use_rest'] = 'always'
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+# def test_invalid_attributes():
+def test_missing_attribute():
+ ''' Test that required attributes are present '''
+ register_responses([
+ ('GET', 'cluster', SRR['is_rest_97']),
+ ])
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_version',
+ 'conditions': [
+ 'firmware_version',
+ ]
+ }
+ args = dict(DEFAULT_ARGS)
+ del args['attributes']
+ error = 'name is sp_version but all of the following are missing: attributes'
+ assert error in call_main(my_main, args, module_args, fail=True)['msg']
+ module_args['use_rest'] = 'always'
+ assert error in call_main(my_main, args, module_args, fail=True)['msg']
+ module_args['use_rest'] = 'never'
+ args['attributes'] = {'node': 'node1'}
+ error = 'Error: attributes: expected_version is required for resource name: sp_version'
+ assert error in call_main(my_main, args, module_args, fail=True)['msg']
+ module_args['use_rest'] = 'always'
+ assert error in call_main(my_main, args, module_args, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_negative_missing_netapp_lib(mock_netapp_lib):
+ ''' create cluster '''
+ module_args = {
+ 'use_rest': 'never',
+ 'name': 'sp_version',
+ 'conditions': 'firmware_version',
+ }
+ mock_netapp_lib.return_value = False
+ error = "the python NetApp-Lib module is required"
+ assert error in call_main(my_main, DEFAULT_ARGS, module_args, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_wwpn_alias.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_wwpn_alias.py
new file mode 100644
index 000000000..1ceece18c
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_wwpn_alias.py
@@ -0,0 +1,192 @@
+# (c) 2020, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for Ansible module: na_ontap_wwpn_alias '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+
+from ansible_collections.netapp.ontap.tests.unit.compat import unittest
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_wwpn_alias \
+ import NetAppOntapWwpnAlias as alias_module # module under test
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, {}, None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ # module specific responses
+ 'get_alias': (
+ 200,
+ {"records": [{
+ "svm": {
+ "uuid": "uuid",
+ "name": "svm"},
+ "alias": "host1",
+ "wwpn": "01:02:03:04:0a:0b:0c:0d"}],
+ "num_records": 1}, None),
+ 'get_svm_uuid': (
+ 200,
+ {"records": [{
+ "uuid": "test_uuid"
+ }]}, None),
+ "no_record": (
+ 200,
+ {"num_records": 0},
+ None)
+}
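+
+# The entries above are canned (status_code, json_body, error) triples for the mocked
+# send_request; tests queue them in call order via side_effect, e.g. (a sketch mirroring
+# the tests below):
+#   mock_request.side_effect = [SRR['is_rest'], SRR['get_svm_uuid'], SRR['no_record']]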
+
+
+class TestMyModule(unittest.TestCase):
+ ''' Unit tests for na_ontap_wwpn_alias '''
+
+ def setUp(self):
+ self.mock_alias = {
+ 'name': 'host1',
+ 'vserver': 'test_vserver'
+ }
+
+ def mock_args(self):
+ return {
+ 'vserver': self.mock_alias['vserver'],
+ 'name': self.mock_alias['name'],
+ "wwpn": "01:02:03:04:0a:0b:0c:0d",
+ 'hostname': 'test_host',
+ 'username': 'test_user',
+ 'password': 'test_pass!'
+ }
+
+ def get_alias_mock_object(self):
+ alias_obj = alias_module()
+ return alias_obj
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_successful_create(self, mock_request):
+ '''Test successful rest create'''
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_svm_uuid'],
+ SRR['no_record'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_create_idempotency(self, mock_request):
+ '''Test rest create idempotency'''
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_svm_uuid'],
+ SRR['get_alias'],
+ SRR['no_record'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert not exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_create_error(self, mock_request):
+ '''Test rest create error'''
+ data = self.mock_args()
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_svm_uuid'],
+ SRR['no_record'],
+ SRR['generic_error'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['msg'] == "Error on creating wwpn alias: Expected error."
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_modify(self, mock_request):
+ '''Test rest modify'''
+ data = self.mock_args()
+ data['wwpn'] = "01:02:03:04:0a:0b:0c:0e"
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_svm_uuid'],
+ SRR['get_alias'],
+ SRR['empty_good'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleExitJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['changed']
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_modify_error_delete(self, mock_request):
+ '''Test rest modify error when deleting the old alias'''
+ data = self.mock_args()
+ data['wwpn'] = "01:02:03:04:0a:0b:0c:0e"
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_svm_uuid'],
+ SRR['get_alias'],
+ SRR['generic_error'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['msg'] == "Error on modifying wwpn alias when trying to delete alias: Expected error."
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_modify_error_create(self, mock_request):
+ '''Test rest modify error when re-creating the alias'''
+ data = self.mock_args()
+ data['wwpn'] = "01:02:03:04:0a:0b:0c:0e"
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_svm_uuid'],
+ SRR['get_alias'],
+ SRR['empty_good'],
+ SRR['generic_error'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['msg'] == "Error on modifying wwpn alias when trying to re-create alias: Expected error."
+
+ @patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+ def test_rest_delete_error(self, mock_request):
+ '''Test rest delete error'''
+ data = self.mock_args()
+ data['state'] = 'absent'
+ set_module_args(data)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['get_svm_uuid'],
+ SRR['get_alias'],
+ SRR['generic_error'],
+ SRR['empty_good'],
+ SRR['end_of_sequence']
+ ]
+ with pytest.raises(AnsibleFailJson) as exc:
+ self.get_alias_mock_object().apply()
+ assert exc.value.args[0]['msg'] == "Error on deleting wwpn alias: Expected error."
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_zapit.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_zapit.py
new file mode 100644
index 000000000..fa64e1f88
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_na_ontap_zapit.py
@@ -0,0 +1,255 @@
+# (c) 2022, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit tests for ONTAP Ansible module: na_ontap_zapit '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import\
+ expect_and_capture_ansible_exception, call_main, create_module, create_and_apply, patch_ansible
+from ansible_collections.netapp.ontap.tests.unit.framework.mock_rest_and_zapi_requests import\
+ patch_request_and_invoke, register_responses
+from ansible_collections.netapp.ontap.tests.unit.framework.zapi_factory import build_zapi_response, zapi_responses
+
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_zapit \
+ import NetAppONTAPZapi as my_module, main as my_main # module under test
+
+if not netapp_utils.has_netapp_lib():
+ pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
+
+
+def cluster_image_info():
+ version = 'Fattire__9.3.0'
+ return {
+ 'num-records': 1,
+ # composite response, attributes-list for cluster-image-get-iter and attributes for cluster-image-get
+ 'attributes-list': [
+ {'cluster-image-info': {
+ 'node-id': 'node4test',
+ 'current-version': version}},
+ {'cluster-image-info': {
+ 'node-id': 'node4test',
+ 'current-version': version}},
+ ],
+ 'attributes': {
+ 'cluster-image-info': {
+ 'node-id': 'node4test',
+ 'current-version': version
+ }},
+ }
+
+
+def build_zapi_error_custom(errno, reason, results='results'):
+ ''' build an XML response
+ errno as int
+ reason as str
+ '''
+ if not netapp_utils.has_netapp_lib():
+ return 'build_zapi_error: netapp-lib is missing', 'invalid'
+ if results != 'results':
+ return (netapp_utils.zapi.NaElement(results), 'valid')
+ xml = {}
+ if errno is not None:
+ xml['errorno'] = errno
+ if reason is not None:
+ xml['reason'] = reason
+ response = netapp_utils.zapi.NaElement('results')
+ if xml:
+ response.translate_struct(xml)
+ return (response, 'valid')
+
+
+ZRR = zapi_responses({
+ 'cluster_image_info': build_zapi_response(cluster_image_info()),
+ 'error_no_errno': build_zapi_error_custom(None, 'some reason'),
+ 'error_no_reason': build_zapi_error_custom(18408, None),
+ 'error_no_results': build_zapi_error_custom(None, None, 'no_results')
+})
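+
+# ZRR maps friendly names to canned ZAPI replies; register_responses() then pairs each
+# expected call with one of them, e.g. (a sketch mirroring the tests below):
+#   register_responses([('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info'])])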
+
+
+DEFAULT_ARGS = {
+ 'hostname': 'hostname',
+ 'username': 'username',
+ 'password': 'password',
+ 'zapi': {'cluster-image-get-iter': None}
+}
+
+
+def test_ensure_zapi_called_cluster():
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ }
+ assert create_and_apply(my_module, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_ensure_zapi_called_vserver():
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', ZRR['cluster_image_info']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ "vserver": "vserver",
+ "zapi": {'cluster-image-get-iter': {'attributes': None}}
+ }
+ assert call_main(my_main, DEFAULT_ARGS, module_args)['changed']
+
+
+def test_negative_zapi_called_attributes():
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', ZRR['error']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ assert exception['msg'] == 'ZAPI failure: check errno and reason.'
+ assert exception['errno'] == '12345'
+ assert exception['reason'] == 'synthetic error for UT purpose'
+
+
+def test_negative_zapi_called_element_no_errno():
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', ZRR['error_no_errno']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ assert exception['msg'] == 'ZAPI failure: check errno and reason.'
+ assert exception['errno'] == 'ESTATUSFAILED'
+ assert exception['reason'] == 'some reason'
+
+
+def test_negative_zapi_called_element_no_reason():
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', ZRR['error_no_reason']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ assert exception['msg'] == 'ZAPI failure: check errno and reason.'
+ assert exception['errno'] == '18408'
+ assert exception['reason'] == 'Execution failure with unknown reason.'
+
+
+def test_negative_zapi_unexpected_error():
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', (netapp_utils.zapi.NaApiError(), 'valid')),
+ ])
+ module_args = {
+ "use_rest": "never",
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ assert exception['msg'] == "Error running zapi cluster-image-get-iter: NetApp API failed. Reason - unknown:unknown"
+
+
+def test_negative_two_zapis():
+ register_responses([
+ ])
+ module_args = {
+ "use_rest": "never",
+ "zapi": {"1": 1, "2": 2}
+ }
+ exception = create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)
+ assert 'A single ZAPI can be called at a time, received: ' in exception['msg']
+
+
+def test_negative_bad_zapi_type():
+ register_responses([
+ ])
+ module_args = {
+ "use_rest": "never",
+ }
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ obj.zapi = "1"
+ error = 'A directory entry is expected, eg: system-get-version: , received: 1'
+ assert expect_and_capture_ansible_exception(obj.run_zapi, 'fail')['msg'] == error
+ obj.zapi = [3, 1]
+ error = 'A directory entry is expected, eg: system-get-version: , received: [3, 1]'
+ assert expect_and_capture_ansible_exception(obj.run_zapi, 'fail')['msg'] == error
+
+
+# python 2.7 does not have bytes but str
+BYTES_MARKER_BEGIN = "b'" if sys.version_info >= (3, 0) else ''
+BYTES_MARKER_END = "'" if sys.version_info >= (3, 0) else ''
+BYTES_TYPE = 'bytes' if sys.version_info >= (3, 0) else 'str'
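+# Illustration: on Python 3, str(b'<no_results/>') renders as "b'<no_results/>'", so the
+# expected error substrings below are wrapped with these markers; on Python 2, bytes is an
+# alias for str and no wrapping occurs.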
+
+
+def test_negative_zapi_called_element_no_results():
+ register_responses([
+ ('ZAPI', 'cluster-image-get-iter', ZRR['error_no_results']),
+ ])
+ module_args = {
+ "use_rest": "never",
+ }
+ error = "Error running zapi, no results field: %s<no_results/>" % BYTES_MARKER_BEGIN
+ assert error in create_and_apply(my_module, DEFAULT_ARGS, module_args, fail=True)['msg']
+
+
+def test_negative_bad_zapi_response_to_string():
+ module_args = {
+ "use_rest": "never",
+ }
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ error = "Error running zapi in to_string: '%s' object has no attribute 'to_string'" % BYTES_TYPE
+ assert expect_and_capture_ansible_exception(obj.jsonify_and_parse_output, 'fail', b'<bad_xml')['msg'] == error
+
+
+class xml_mock:
+ def __init__(self, data, action=None):
+ self.data = data
+ self.action = action
+
+ def to_string(self):
+ if self.action == 'raise_on_to_string':
+ raise ValueError(self.data)
+ return self.data
+
+
+def test_negative_bad_zapi_response_bad_xml():
+ module_args = {
+ "use_rest": "never",
+ }
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ xml = xml_mock(b'<bad_xml')
+ error = "Error running zapi in xmltodict: %s<bad_xml%s: unclosed token" % (BYTES_MARKER_BEGIN, BYTES_MARKER_END)
+ assert error in expect_and_capture_ansible_exception(obj.jsonify_and_parse_output, 'fail', xml)['msg']
+
+
+def test_negative_bad_zapi_response_bad_json():
+ # TODO: need to find some valid XML that cannot be converted in JSON. Is it possible?
+ module_args = {
+ "use_rest": "never",
+ }
+ obj = create_module(my_module, DEFAULT_ARGS, module_args)
+ xml = xml_mock(b'<bad_json><elemX-1>elem_value</elemX-1><elem-2>elem_value</elem-2></bad_json>')
+ error = "Error running zapi, no results field"
+ assert error in expect_and_capture_ansible_exception(obj.jsonify_and_parse_output, 'fail', xml)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib')
+def test_fail_netapp_lib_error(mock_has_netapp_lib):
+ mock_has_netapp_lib.return_value = False
+ assert 'Error: the python NetApp-Lib module is required. Import error: None' == call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_zapit.HAS_JSON', False)
+def test_fail_json_error():
+ assert 'the python json module is required' == call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
+
+
+@patch('ansible_collections.netapp.ontap.plugins.modules.na_ontap_zapit.HAS_XMLTODICT', False)
+def test_fail_xmltodict_error():
+ assert 'the python xmltodict module is required' == call_main(my_main, DEFAULT_ARGS, fail=True)['msg']
diff --git a/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_ontap_fdspt.py b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_ontap_fdspt.py
new file mode 100644
index 000000000..80512768e
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/plugins/modules/test_ontap_fdspt.py
@@ -0,0 +1,164 @@
+# (c) 2021, NetApp, Inc
+# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
+
+''' unit test for ONTAP na_ontap_fdspt Ansible module '''
+
+from __future__ import (absolute_import, division, print_function)
+__metaclass__ = type
+import pytest
+import sys
+
+from ansible_collections.netapp.ontap.tests.unit.compat.mock import patch
+import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
+# pylint: disable=unused-import
+from ansible_collections.netapp.ontap.tests.unit.plugins.module_utils.ansible_mocks import set_module_args,\
+ AnsibleFailJson, AnsibleExitJson, patch_ansible
+
+from ansible_collections.netapp.ontap.plugins.modules.na_ontap_fdspt \
+ import NetAppOntapFDSPT as my_module # module under test
+
+
+if not netapp_utils.HAS_REQUESTS and sys.version_info < (2, 7):
+ pytestmark = pytest.mark.skip('Skipping Unit Tests on 2.6 as requests is not available')
+
+
+def default_args():
+ args = {
+ 'name': 'policy1',
+ 'vserver': 'vserver1',
+ 'hostname': '10.10.10.10',
+ 'username': 'username',
+ 'password': 'password',
+ 'use_rest': 'always',
+ 'ntfs_mode': 'ignore',
+ 'security_type': 'ntfs',
+ 'path': '/'
+ }
+ return args
+
+
+# REST API canned responses when mocking send_request
+SRR = {
+ # common responses
+ 'is_rest': (200, dict(version=dict(generation=9, major=9, minor=0, full='dummy')), None),
+ 'is_rest_9_8': (200, dict(version=dict(generation=9, major=8, minor=0, full='dummy')), None),
+ 'is_zapi': (400, {}, "Unreachable"),
+ 'empty_good': (200, {}, None),
+ 'zero_record': (200, dict(records=[], num_records=0), None),
+ 'one_record_uuid': (200, dict(records=[dict(uuid='a1b2c3')], num_records=1), None),
+ 'end_of_sequence': (500, None, "Unexpected call to send_request"),
+ 'generic_error': (400, None, "Expected error"),
+ 'policy_task_record': (
+ 200, {
+ 'records': [{
+ 'vserver': 'vserver1',
+ 'policy_name': 'policy1',
+ 'index_num': 1,
+ 'path': '/',
+ 'security_type': 'ntfs',
+ 'ntfs_mode': 'ignore',
+ 'access_control': 'file_directory'}],
+ 'num_records': 1},
+ None),
+}
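+
+# These are (status_code, json_body, error) triples consumed in call order by the mocked
+# send_request; for instance, the idempotency test below queues SRR['is_rest'] followed by
+# SRR['policy_task_record'] and expects changed to be False.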
+
+
+def test_module_fail_when_required_args_missing(patch_ansible):
+ ''' required arguments are reported as errors '''
+ with pytest.raises(AnsibleFailJson) as exc:
+ set_module_args({})
+ my_module()
+ print('Info: %s' % exc.value.args[0]['msg'])
+
+
+def test_rest_missing_arguments(patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' test missing arguments '''
+ args = dict(default_args())
+ del args['hostname']
+ set_module_args(args)
+ with pytest.raises(AnsibleFailJson) as exc:
+ my_module()
+ msg = 'missing required arguments: hostname'
+ assert exc.value.args[0]['msg'] == msg
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_create(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Create security policies '''
+ args = dict(default_args())
+ args['name'] = 'new_policy_task'
+ print(args)
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['zero_record'],
+ SRR['empty_good'], # create
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed']
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_remove(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' remove Security policies '''
+ args = dict(default_args())
+ args['state'] = 'absent'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['policy_task_record'],
+ SRR['empty_good'], # delete
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 3
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_modify(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Modify Security policies '''
+ args = dict(default_args())
+ args['state'] = 'present'
+ args['name'] = 'policy1'
+ args['ntfs_mode'] = 'replace'
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['policy_task_record'],
+ SRR['empty_good'], # delete
+ SRR['empty_good'], # add
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is True
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 4
+
+
+@patch('ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI.send_request')
+def test_rest_no_action(mock_request, patch_ansible): # pylint: disable=redefined-outer-name,unused-argument
+ ''' Idempotent test '''
+ args = dict(default_args())
+ set_module_args(args)
+ mock_request.side_effect = [
+ SRR['is_rest'],
+ SRR['policy_task_record'],
+ SRR['end_of_sequence']
+ ]
+ my_obj = my_module()
+ with pytest.raises(AnsibleExitJson) as exc:
+ my_obj.apply()
+ assert exc.value.args[0]['changed'] is False
+ print(mock_request.mock_calls)
+ assert len(mock_request.mock_calls) == 2
diff --git a/ansible_collections/netapp/ontap/tests/unit/requirements.txt b/ansible_collections/netapp/ontap/tests/unit/requirements.txt
new file mode 100644
index 000000000..290e4346d
--- /dev/null
+++ b/ansible_collections/netapp/ontap/tests/unit/requirements.txt
@@ -0,0 +1,7 @@
+ipaddress ; python_version >= '2.7'
+isodate ; python_version >= '2.7'
+netapp-lib ; python_version >= '2.7'
+requests ; python_version >= '2.7'
+six ; python_version >= '2.7'
+solidfire-sdk-python ; python_version >= '2.7'
+xmltodict ; python_version >= '2.7'